Importing Libraries¶

In [14]:
import numpy as np
import matplotlib.pyplot as plt
In [15]:
# Configure GPU memory growth so TensorFlow allocates VRAM on demand instead
# of reserving all of it up front (avoids OOM when the GPU is shared).
import tensorflow as tf
gpus = tf.config.experimental.list_physical_devices('GPU')
for gpu in gpus:
  # NOTE(review): must run before any GPU op initialises the device; this is
  # the experimental API — confirm tf.config.set_memory_growth on newer TF.
  tf.config.experimental.set_memory_growth(gpu, True)

Importing Image Data¶

In [16]:
import os

from tensorflow.keras.utils import image_dataset_from_directory

# All three splits live under one root directory, one subfolder per class.
DATA_DIR = 'Dataset for CA1 part A - AY2425S1'

# Batch size large enough to hold every image, so each dataset yields a single
# (images, labels) batch that is easy to materialise as numpy arrays.
LOAD_ALL_BATCH = 200000


def load_split(split, image_size, batch_size=LOAD_ALL_BATCH):
    """Load one dataset split as grayscale images with one-hot labels.

    Parameters
    ----------
    split : str
        Subfolder name: 'train', 'test' or 'validation'.
    image_size : tuple of int
        (height, width) every image is resized to.
    batch_size : int or None
        None yields individual images instead of batches.
    """
    return image_dataset_from_directory(
        os.path.join(DATA_DIR, split),
        labels="inferred",
        label_mode="categorical",  # one-hot labels for categorical_crossentropy
        color_mode="grayscale",
        batch_size=batch_size,
        image_size=image_size,
        shuffle=False,             # keep file order identical across sizes/splits
    )


# Per-image dataset at 137x137 used for the PCA outlier analysis below.
train_dataset = load_split('train', (137, 137), batch_size=None)

# 37x37 and 131x131 variants feed the two CNNs.
train_dataset_37 = load_split('train', (37, 37))
test_dataset_37 = load_split('test', (37, 37))
validate_dataset_37 = load_split('validation', (37, 37))

train_dataset_131 = load_split('train', (131, 131))
test_dataset_131 = load_split('test', (131, 131))
validate_dataset_131 = load_split('validation', (131, 131))
Found 9032 files belonging to 15 classes.
Found 9032 files belonging to 15 classes.
Found 3000 files belonging to 15 classes.
Found 3000 files belonging to 15 classes.
Found 9032 files belonging to 15 classes.
Found 3000 files belonging to 15 classes.
Found 3000 files belonging to 15 classes.
In [ ]:
# Group the 137x137 training images by class. The original version iterated
# over the entire dataset once per class (O(classes * images)); a single pass
# with a bucket per class produces the same list-of-lists in the same order.
images = [[] for _ in range(len(train_dataset.class_names))]
for x, y in train_dataset:
    # y is a one-hot vector (label_mode="categorical"); argmax is the class id,
    # equivalent to np.where(y)[0][0] on a one-hot label.
    images[int(np.argmax(y.numpy()))].append(x.numpy())
In [17]:
def dataset_to_numpy(dataset):
    """Materialise a batched tf.data dataset into two lists of numpy arrays.

    Returns (images, labels): one numpy array per batch, in dataset order.
    Replaces six copy-pasted conversion loops with a single helper.
    """
    images, labels = [], []
    for x, y in dataset:
        images.append(x.numpy())
        labels.append(y.numpy())
    return images, labels


X_train37, y_train37 = dataset_to_numpy(train_dataset_37)
X_test37, y_test37 = dataset_to_numpy(test_dataset_37)
X_validate37, y_validate37 = dataset_to_numpy(validate_dataset_37)

X_train131, y_train131 = dataset_to_numpy(train_dataset_131)
X_test131, y_test131 = dataset_to_numpy(test_dataset_131)
X_validate131, y_validate131 = dataset_to_numpy(validate_dataset_131)
In [18]:
# Stack the per-batch arrays into one numpy array per split. Concatenation is
# along the sample axis (np.concatenate's default axis is 0).
X_train37, y_train37 = np.concatenate(X_train37), np.concatenate(y_train37)
X_test37, y_test37 = np.concatenate(X_test37), np.concatenate(y_test37)
X_validate37, y_validate37 = np.concatenate(X_validate37), np.concatenate(y_validate37)

X_train131, y_train131 = np.concatenate(X_train131), np.concatenate(y_train131)
X_test131, y_test131 = np.concatenate(X_test131), np.concatenate(y_test131)
X_validate131, y_validate131 = np.concatenate(X_validate131), np.concatenate(y_validate131)

Normalise the image data

In [19]:
# Rescale pixel values from [0, 255] to [0, 1] for the 37x37 model.
# NOTE(review): the 131x131 arrays (X_train131 / X_test131 / X_validate131)
# are never rescaled here — confirm whether the 131 model normalises its
# input elsewhere, otherwise the two models receive differently scaled data.
X_train37 = X_train37 / 255
X_test37 = X_test37 / 255
X_validate37 = X_validate37 / 255

Exploratory Data Analysis¶

PCA to analyse dataset and flag outliers¶

After manually checking through the dataset and manually removing data which isn't supposed to belong to a class (i.e. carrots in beans), doing PCA can help us to 'double-check' if we have actually gotten all of the misplaced data.

In [9]:
from sklearn.decomposition import PCA
from sklearn.metrics import mean_squared_error

# Per class: project the images onto their principal components, reconstruct,
# and flag the 15% of images with the largest reconstruction error as outliers.
outlier_images = []

for progress, class_images in enumerate(images, start=1):
    # Flatten each image exactly once (the original rebuilt this list three
    # times per class inside the error computation).
    X_flat = np.array([img.flatten() for img in class_images])

    # NOTE(review): n_components equal to the number of images reconstructs the
    # data almost exactly, so the ranked errors are near numerical noise —
    # consider a smaller component count to make the ranking meaningful.
    pca = PCA(n_components=len(class_images))
    X_pca = pca.fit_transform(X_flat)
    X_reconstructed = pca.inverse_transform(X_pca)

    reconstruction_errors = [
        mean_squared_error(X_flat[i], X_reconstructed[i])
        for i in range(len(X_flat))
    ]

    # Images above the 85th percentile of error are treated as outliers.
    threshold = np.percentile(reconstruction_errors, 85)
    outliers_indices = np.where(reconstruction_errors > threshold)[0]
    outlier_images.append([class_images[i] for i in outliers_indices])

    # "Finsished" typo in the progress message fixed.
    print(f"Finished with {progress}/{len(images)}. Total images: {len(class_images)} Total outliers: {len(outliers_indices)}")
Finsished with 1/15. Total images: 792 Total outliers: 119
Finsished with 2/15. Total images: 720 Total outliers: 108
Finsished with 3/15. Total images: 441 Total outliers: 66
Finsished with 4/15. Total images: 868 Total outliers: 131
Finsished with 5/15. Total images: 750 Total outliers: 113
Finsished with 6/15. Total images: 503 Total outliers: 76
Finsished with 7/15. Total images: 351 Total outliers: 53
Finsished with 8/15. Total images: 256 Total outliers: 39
Finsished with 9/15. Total images: 587 Total outliers: 88
Finsished with 10/15. Total images: 812 Total outliers: 122
Finsished with 11/15. Total images: 566 Total outliers: 85
Finsished with 12/15. Total images: 377 Total outliers: 57
Finsished with 13/15. Total images: 814 Total outliers: 122
Finsished with 14/15. Total images: 248 Total outliers: 38
Finsished with 15/15. Total images: 955 Total outliers: 144
In [15]:
import matplotlib.pyplot as plt

# Show every flagged outlier, one figure per class, laid out on a 10-row grid.
for outlier_images_data in outlier_images:
    num_rows = 10
    num_cols = (len(outlier_images_data) + num_rows - 1) // num_rows

    # squeeze=False guarantees a 2-D axes array even when num_cols == 1,
    # where the original's squeezed 1-D array would have crashed the nested
    # loop below (its num_rows==1 and num_cols==1 guard could never trigger
    # with num_rows fixed at 10).
    fig, axes = plt.subplots(num_rows, num_cols, figsize=(16, 16), squeeze=False)

    for i, ax_row in enumerate(axes):
        for j, ax in enumerate(ax_row):
            index = i * num_cols + j
            if index < len(outlier_images_data):
                ax.imshow(outlier_images_data[index], cmap='gray')
                # BUG FIX: the original titled each image with
                # reconstruction_errors[index], but that variable only holds
                # the errors of the *last* class processed, so the titles were
                # wrong for every other class. Title with the image's index
                # within its class instead.
                ax.set_title(f'outlier {index}')
            ax.axis('off')

    plt.show()
    plt.close(fig)  # free figure memory across the 15 classes
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image
No description has been provided for this image

For the bean dataset, the PCA has helped to detect some carrots which I missed out as outliers.

PCA can also aid in data exploration for the dataset. For some datasets like potato and capsicum, most of the images PCA has picked out are those with a light background and have only one or few of those items.

However, I won't remove those outliers, because the train and validation datasets contain the same types of images, so removing them would make the training data unrepresentative.

In [56]:
# Class counts per split: summing the one-hot label matrix over axis 0 gives
# the number of images in each class. The label/count variables are kept at
# module level for any downstream cells.
class_counts_train = np.sum(y_train37, axis=0)
class_labels_train = list(train_dataset_37.class_names)

class_counts_validate = np.sum(y_validate37, axis=0)
class_labels_validate = list(validate_dataset_37.class_names)

class_counts_test = np.sum(y_test37, axis=0)
class_labels_test = list(test_dataset_37.class_names)

# One bar chart per split, drawn with a single loop instead of three
# copy-pasted plotting blocks.
fig, axes = plt.subplots(1, 3)
fig.set_size_inches(20, 5)

panels = [
    (class_labels_train, class_counts_train, 'Train Dataset'),
    (class_labels_validate, class_counts_validate, 'Validation Dataset'),
    (class_labels_test, class_counts_test, 'Test Dataset'),
]
for ax, (labels, counts, title) in zip(axes, panels):
    ax.bar(labels, counts, color='skyblue')
    ax.set_xlabel('Class')
    ax.set_ylabel('Counts')
    ax.set_title(title)
    ax.xaxis.set_tick_params(rotation=90)

plt.show()
No description has been provided for this image

As you can see, there are class imbalances in the train dataset only. Carrot and Radish have the lowest number of images in the dataset.

To help counter the class imbalance, we will use class weights when fitting the models.

In [24]:
from sklearn.utils.class_weight import compute_class_weight


def generate_class_weights(class_series):
    """Map each class index to its 'balanced' weight.

    class_series: one-hot label matrix of shape (n_samples, n_classes).
    Returns a {class_index: weight} dict suitable for Keras
    fit(class_weight=...), giving under-represented classes a larger weight.
    """
    class_indices = np.argmax(class_series, axis=1)
    present_classes = np.unique(class_indices)
    weights = compute_class_weight(
        class_weight='balanced', classes=present_classes, y=class_indices
    )
    return dict(zip(present_classes, weights))


# Collect the one-hot labels of every training image, then derive the weights.
labels = np.array([label.numpy() for _, label in train_dataset_37.unbatch()])
class_weights = generate_class_weights(labels)

Creating the model for 37x37 images¶

In [25]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, MaxPooling2D, Dropout, BatchNormalization
In [26]:
# Fix random seeds for reproducibility. Seeding numpy alone does not make the
# Keras training reproducible — TensorFlow keeps its own RNG (weight init,
# dropout, shuffling), so seed it as well. `tf` is imported at the top of the
# notebook.
seed = 88
np.random.seed(seed)
tf.random.set_seed(seed)
In [27]:
from tensorflow.keras.optimizers import Adam

# CNN for the 37x37 grayscale images: three convolutional stages with batch
# normalisation and dropout, followed by two dense layers and a 15-way softmax.
model37 = Sequential([
    Conv2D(32, (3, 3), activation='relu', input_shape=(37, 37, 1)),
    BatchNormalization(),
    MaxPooling2D((2, 2)),

    Conv2D(64, (3, 3), activation='relu'),
    BatchNormalization(),
    Dropout(0.1),
    MaxPooling2D((2, 2)),

    Conv2D(64, (3, 3), activation='relu'),
    BatchNormalization(),
    Dropout(0.5),

    Flatten(),

    Dense(100, activation='relu'),
    Dropout(0.15),  # dropout for regularization

    Dense(50, activation='relu'),
    Dropout(0.1),   # dropout for regularization

    Dense(15, activation='softmax'),
])

# A low learning rate keeps the loss/accuracy curves from oscillating.
optimizer = Adam(learning_rate=0.00009)

model37.compile(optimizer=optimizer,
                loss='categorical_crossentropy',
                metrics=['accuracy'])

model37.summary()
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_3 (Conv2D)           (None, 35, 35, 32)        320       
                                                                 
 batch_normalization_3 (Batc  (None, 35, 35, 32)       128       
 hNormalization)                                                 
                                                                 
 max_pooling2d_2 (MaxPooling  (None, 17, 17, 32)       0         
 2D)                                                             
                                                                 
 conv2d_4 (Conv2D)           (None, 15, 15, 64)        18496     
                                                                 
 batch_normalization_4 (Batc  (None, 15, 15, 64)       256       
 hNormalization)                                                 
                                                                 
 dropout_4 (Dropout)         (None, 15, 15, 64)        0         
                                                                 
 max_pooling2d_3 (MaxPooling  (None, 7, 7, 64)         0         
 2D)                                                             
                                                                 
 conv2d_5 (Conv2D)           (None, 5, 5, 64)          36928     
                                                                 
 batch_normalization_5 (Batc  (None, 5, 5, 64)         256       
 hNormalization)                                                 
                                                                 
 dropout_5 (Dropout)         (None, 5, 5, 64)          0         
                                                                 
 flatten_1 (Flatten)         (None, 1600)              0         
                                                                 
 dense_3 (Dense)             (None, 100)               160100    
                                                                 
 dropout_6 (Dropout)         (None, 100)               0         
                                                                 
 dense_4 (Dense)             (None, 50)                5050      
                                                                 
 dropout_7 (Dropout)         (None, 50)                0         
                                                                 
 dense_5 (Dense)             (None, 15)                765       
                                                                 
=================================================================
Total params: 222,299
Trainable params: 221,979
Non-trainable params: 320
_________________________________________________________________

To improve the model, I added dropout to prevent overfitting, added BatchNormalization between layers to stabilize the distributions of layer inputs and MaxPooling2D to downsample the data and extract the most important information.

The learning rate of Adam was set to 0.00009 to help reduce volatility in the loss and accuracy scores.

This model has a total of 221,979 trainable parameters

In [28]:
# Train for 300 epochs using the balanced class weights computed earlier to
# counter the class imbalance in the training split.
# NOTE(review): no EarlyStopping/ModelCheckpoint callback — the final weights
# are whatever epoch 300 ends on, not the best validation epoch; confirm this
# is intended.
history37 = model37.fit(X_train37, y_train37, validation_data=(X_validate37, y_validate37), epochs=300, batch_size=50, verbose=1, class_weight=class_weights)
Epoch 1/300
181/181 [==============================] - 12s 9ms/step - loss: 3.0627 - accuracy: 0.1032 - val_loss: 2.7516 - val_accuracy: 0.0743
Epoch 2/300
181/181 [==============================] - 1s 7ms/step - loss: 2.6166 - accuracy: 0.1611 - val_loss: 2.6903 - val_accuracy: 0.1093
Epoch 3/300
181/181 [==============================] - 1s 7ms/step - loss: 2.3602 - accuracy: 0.2277 - val_loss: 2.2901 - val_accuracy: 0.2510
Epoch 4/300
181/181 [==============================] - 1s 7ms/step - loss: 2.2155 - accuracy: 0.2819 - val_loss: 1.8849 - val_accuracy: 0.4030
Epoch 5/300
181/181 [==============================] - 1s 7ms/step - loss: 2.0397 - accuracy: 0.3282 - val_loss: 1.7183 - val_accuracy: 0.4690
Epoch 6/300
181/181 [==============================] - 1s 7ms/step - loss: 1.9165 - accuracy: 0.3720 - val_loss: 1.5841 - val_accuracy: 0.5110
Epoch 7/300
181/181 [==============================] - 1s 7ms/step - loss: 1.7899 - accuracy: 0.4101 - val_loss: 1.5190 - val_accuracy: 0.5310
Epoch 8/300
181/181 [==============================] - 1s 7ms/step - loss: 1.7040 - accuracy: 0.4397 - val_loss: 1.4176 - val_accuracy: 0.5560
Epoch 9/300
181/181 [==============================] - 1s 7ms/step - loss: 1.6151 - accuracy: 0.4665 - val_loss: 1.3156 - val_accuracy: 0.5967
Epoch 10/300
181/181 [==============================] - 1s 7ms/step - loss: 1.5408 - accuracy: 0.4805 - val_loss: 1.2578 - val_accuracy: 0.6073
Epoch 11/300
181/181 [==============================] - 1s 7ms/step - loss: 1.4740 - accuracy: 0.4999 - val_loss: 1.2028 - val_accuracy: 0.6177
Epoch 12/300
181/181 [==============================] - 1s 7ms/step - loss: 1.4006 - accuracy: 0.5289 - val_loss: 1.1913 - val_accuracy: 0.6250
Epoch 13/300
181/181 [==============================] - 1s 7ms/step - loss: 1.3318 - accuracy: 0.5456 - val_loss: 1.0880 - val_accuracy: 0.6583
Epoch 14/300
181/181 [==============================] - 1s 7ms/step - loss: 1.2944 - accuracy: 0.5645 - val_loss: 1.1294 - val_accuracy: 0.6420
Epoch 15/300
181/181 [==============================] - 1s 7ms/step - loss: 1.2425 - accuracy: 0.5792 - val_loss: 1.0483 - val_accuracy: 0.6630
Epoch 16/300
181/181 [==============================] - 1s 7ms/step - loss: 1.1731 - accuracy: 0.5974 - val_loss: 0.9340 - val_accuracy: 0.7023
Epoch 17/300
181/181 [==============================] - 1s 7ms/step - loss: 1.1364 - accuracy: 0.6116 - val_loss: 0.9329 - val_accuracy: 0.7010
Epoch 18/300
181/181 [==============================] - 1s 7ms/step - loss: 1.1032 - accuracy: 0.6199 - val_loss: 0.9082 - val_accuracy: 0.7087
Epoch 19/300
181/181 [==============================] - 1s 7ms/step - loss: 1.0528 - accuracy: 0.6355 - val_loss: 0.8692 - val_accuracy: 0.7187
Epoch 20/300
181/181 [==============================] - 1s 7ms/step - loss: 1.0183 - accuracy: 0.6489 - val_loss: 0.8315 - val_accuracy: 0.7300
Epoch 21/300
181/181 [==============================] - 1s 7ms/step - loss: 0.9858 - accuracy: 0.6628 - val_loss: 0.8922 - val_accuracy: 0.7127
Epoch 22/300
181/181 [==============================] - 1s 6ms/step - loss: 0.9511 - accuracy: 0.6722 - val_loss: 0.7999 - val_accuracy: 0.7390
Epoch 23/300
181/181 [==============================] - 1s 7ms/step - loss: 0.9209 - accuracy: 0.6817 - val_loss: 0.7649 - val_accuracy: 0.7527
Epoch 24/300
181/181 [==============================] - 1s 7ms/step - loss: 0.8932 - accuracy: 0.6861 - val_loss: 0.8165 - val_accuracy: 0.7327
Epoch 25/300
181/181 [==============================] - 1s 7ms/step - loss: 0.8563 - accuracy: 0.7010 - val_loss: 0.7483 - val_accuracy: 0.7543
Epoch 26/300
181/181 [==============================] - 1s 7ms/step - loss: 0.8525 - accuracy: 0.7050 - val_loss: 0.7093 - val_accuracy: 0.7693
Epoch 27/300
181/181 [==============================] - 1s 7ms/step - loss: 0.8008 - accuracy: 0.7212 - val_loss: 0.6851 - val_accuracy: 0.7813
Epoch 28/300
181/181 [==============================] - 1s 7ms/step - loss: 0.7798 - accuracy: 0.7315 - val_loss: 0.7079 - val_accuracy: 0.7653
Epoch 29/300
181/181 [==============================] - 1s 7ms/step - loss: 0.7690 - accuracy: 0.7357 - val_loss: 0.6837 - val_accuracy: 0.7760
Epoch 30/300
181/181 [==============================] - 1s 7ms/step - loss: 0.7393 - accuracy: 0.7444 - val_loss: 0.7795 - val_accuracy: 0.7603
Epoch 31/300
181/181 [==============================] - 1s 7ms/step - loss: 0.7169 - accuracy: 0.7482 - val_loss: 0.7389 - val_accuracy: 0.7660
Epoch 32/300
181/181 [==============================] - 1s 7ms/step - loss: 0.6911 - accuracy: 0.7597 - val_loss: 0.6261 - val_accuracy: 0.7957
Epoch 33/300
181/181 [==============================] - 1s 7ms/step - loss: 0.6895 - accuracy: 0.7592 - val_loss: 0.6046 - val_accuracy: 0.8033
Epoch 34/300
181/181 [==============================] - 1s 7ms/step - loss: 0.6704 - accuracy: 0.7679 - val_loss: 0.6157 - val_accuracy: 0.8023
Epoch 35/300
181/181 [==============================] - 1s 7ms/step - loss: 0.6278 - accuracy: 0.7827 - val_loss: 0.6121 - val_accuracy: 0.7990
Epoch 36/300
181/181 [==============================] - 1s 7ms/step - loss: 0.6456 - accuracy: 0.7706 - val_loss: 0.6038 - val_accuracy: 0.7953
Epoch 37/300
181/181 [==============================] - 1s 7ms/step - loss: 0.6141 - accuracy: 0.7798 - val_loss: 0.5503 - val_accuracy: 0.8173
Epoch 38/300
181/181 [==============================] - 1s 7ms/step - loss: 0.5978 - accuracy: 0.7881 - val_loss: 0.5599 - val_accuracy: 0.8173
Epoch 39/300
181/181 [==============================] - 1s 7ms/step - loss: 0.5935 - accuracy: 0.7950 - val_loss: 0.5649 - val_accuracy: 0.8213
Epoch 40/300
181/181 [==============================] - 1s 7ms/step - loss: 0.5799 - accuracy: 0.7984 - val_loss: 0.7377 - val_accuracy: 0.7707
Epoch 41/300
181/181 [==============================] - 1s 7ms/step - loss: 0.5651 - accuracy: 0.7985 - val_loss: 0.5269 - val_accuracy: 0.8343
Epoch 42/300
181/181 [==============================] - 1s 8ms/step - loss: 0.5401 - accuracy: 0.8079 - val_loss: 0.5297 - val_accuracy: 0.8257
Epoch 43/300
181/181 [==============================] - 1s 7ms/step - loss: 0.5439 - accuracy: 0.8100 - val_loss: 0.5333 - val_accuracy: 0.8327
Epoch 44/300
181/181 [==============================] - 2s 8ms/step - loss: 0.5107 - accuracy: 0.8175 - val_loss: 0.5032 - val_accuracy: 0.8367
Epoch 45/300
181/181 [==============================] - 1s 8ms/step - loss: 0.5109 - accuracy: 0.8122 - val_loss: 0.4921 - val_accuracy: 0.8470
Epoch 46/300
181/181 [==============================] - 2s 9ms/step - loss: 0.4955 - accuracy: 0.8234 - val_loss: 0.4744 - val_accuracy: 0.8527
Epoch 47/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4714 - accuracy: 0.8275 - val_loss: 0.4975 - val_accuracy: 0.8417
Epoch 48/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4638 - accuracy: 0.8330 - val_loss: 0.5257 - val_accuracy: 0.8360
Epoch 49/300
181/181 [==============================] - 1s 7ms/step - loss: 0.4551 - accuracy: 0.8365 - val_loss: 0.4765 - val_accuracy: 0.8480
Epoch 50/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4563 - accuracy: 0.8406 - val_loss: 0.4621 - val_accuracy: 0.8580
Epoch 51/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4276 - accuracy: 0.8482 - val_loss: 0.4554 - val_accuracy: 0.8557
Epoch 52/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4258 - accuracy: 0.8461 - val_loss: 0.4693 - val_accuracy: 0.8550
Epoch 53/300
181/181 [==============================] - 1s 7ms/step - loss: 0.4224 - accuracy: 0.8467 - val_loss: 0.4538 - val_accuracy: 0.8613
Epoch 54/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4040 - accuracy: 0.8584 - val_loss: 0.4913 - val_accuracy: 0.8513
Epoch 55/300
181/181 [==============================] - 1s 8ms/step - loss: 0.4093 - accuracy: 0.8527 - val_loss: 0.4508 - val_accuracy: 0.8607
Epoch 56/300
181/181 [==============================] - 1s 8ms/step - loss: 0.3914 - accuracy: 0.8612 - val_loss: 0.4349 - val_accuracy: 0.8653
Epoch 57/300
181/181 [==============================] - 1s 8ms/step - loss: 0.3919 - accuracy: 0.8632 - val_loss: 0.4356 - val_accuracy: 0.8637
Epoch 58/300
181/181 [==============================] - 1s 8ms/step - loss: 0.3858 - accuracy: 0.8597 - val_loss: 0.5536 - val_accuracy: 0.8323
Epoch 59/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3760 - accuracy: 0.8657 - val_loss: 0.4305 - val_accuracy: 0.8657
Epoch 60/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3721 - accuracy: 0.8686 - val_loss: 0.4279 - val_accuracy: 0.8650
Epoch 61/300
181/181 [==============================] - 1s 8ms/step - loss: 0.3497 - accuracy: 0.8708 - val_loss: 0.4578 - val_accuracy: 0.8560
Epoch 62/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3565 - accuracy: 0.8743 - val_loss: 0.4082 - val_accuracy: 0.8770
Epoch 63/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3247 - accuracy: 0.8829 - val_loss: 0.5166 - val_accuracy: 0.8380
Epoch 64/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3519 - accuracy: 0.8726 - val_loss: 0.5162 - val_accuracy: 0.8500
Epoch 65/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3269 - accuracy: 0.8830 - val_loss: 0.4379 - val_accuracy: 0.8650
Epoch 66/300
181/181 [==============================] - 1s 7ms/step - loss: 0.3212 - accuracy: 0.8857 - val_loss: 0.4028 - val_accuracy: 0.8737
Epoch 67/300
181/181 [==============================] - 1s 8ms/step - loss: 0.3215 - accuracy: 0.8826 - val_loss: 0.4235 - val_accuracy: 0.8750
Epoch 68/300
181/181 [==============================] - 2s 9ms/step - loss: 0.3073 - accuracy: 0.8906 - val_loss: 0.3944 - val_accuracy: 0.8800
Epoch 69/300
181/181 [==============================] - 2s 9ms/step - loss: 0.3157 - accuracy: 0.8861 - val_loss: 0.5103 - val_accuracy: 0.8473
Epoch 70/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2987 - accuracy: 0.8957 - val_loss: 0.3603 - val_accuracy: 0.8927
Epoch 71/300
181/181 [==============================] - 1s 8ms/step - loss: 0.2910 - accuracy: 0.8926 - val_loss: 0.4690 - val_accuracy: 0.8567
Epoch 72/300
181/181 [==============================] - 1s 8ms/step - loss: 0.2904 - accuracy: 0.8946 - val_loss: 0.3859 - val_accuracy: 0.8830
Epoch 73/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2876 - accuracy: 0.8957 - val_loss: 0.3787 - val_accuracy: 0.8870
Epoch 74/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2607 - accuracy: 0.9025 - val_loss: 0.4700 - val_accuracy: 0.8607
Epoch 75/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2850 - accuracy: 0.8953 - val_loss: 0.3890 - val_accuracy: 0.8857
Epoch 76/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2784 - accuracy: 0.8998 - val_loss: 0.3777 - val_accuracy: 0.8860
Epoch 77/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2806 - accuracy: 0.9002 - val_loss: 0.3957 - val_accuracy: 0.8797
Epoch 78/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2572 - accuracy: 0.9064 - val_loss: 0.4124 - val_accuracy: 0.8793
Epoch 79/300
181/181 [==============================] - 1s 8ms/step - loss: 0.2632 - accuracy: 0.9057 - val_loss: 0.3854 - val_accuracy: 0.8900
Epoch 80/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2562 - accuracy: 0.9085 - val_loss: 0.3693 - val_accuracy: 0.8913
Epoch 81/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2549 - accuracy: 0.9098 - val_loss: 0.4148 - val_accuracy: 0.8817
Epoch 82/300
181/181 [==============================] - 1s 8ms/step - loss: 0.2540 - accuracy: 0.9102 - val_loss: 0.3670 - val_accuracy: 0.8877
Epoch 83/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2424 - accuracy: 0.9141 - val_loss: 0.3724 - val_accuracy: 0.8903
Epoch 84/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2374 - accuracy: 0.9135 - val_loss: 0.3820 - val_accuracy: 0.8860
Epoch 85/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2247 - accuracy: 0.9159 - val_loss: 0.3962 - val_accuracy: 0.8813
Epoch 86/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2353 - accuracy: 0.9147 - val_loss: 0.3757 - val_accuracy: 0.8953
Epoch 87/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2280 - accuracy: 0.9200 - val_loss: 0.3472 - val_accuracy: 0.9000
Epoch 88/300
181/181 [==============================] - 1s 8ms/step - loss: 0.2292 - accuracy: 0.9159 - val_loss: 0.4288 - val_accuracy: 0.8737
Epoch 89/300
181/181 [==============================] - 2s 9ms/step - loss: 0.2198 - accuracy: 0.9176 - val_loss: 0.3776 - val_accuracy: 0.8963
Epoch 90/300
181/181 [==============================] - 2s 8ms/step - loss: 0.2239 - accuracy: 0.9206 - val_loss: 0.3644 - val_accuracy: 0.8943
Epoch 91/300
181/181 [==============================] - 2s 9ms/step - loss: 0.2175 - accuracy: 0.9191 - val_loss: 0.3744 - val_accuracy: 0.8920
Epoch 92/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2062 - accuracy: 0.9265 - val_loss: 0.6580 - val_accuracy: 0.8180
Epoch 93/300
181/181 [==============================] - 2s 8ms/step - loss: 0.2090 - accuracy: 0.9269 - val_loss: 0.3722 - val_accuracy: 0.8917
Epoch 94/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2106 - accuracy: 0.9223 - val_loss: 0.3449 - val_accuracy: 0.9037
Epoch 95/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2027 - accuracy: 0.9262 - val_loss: 0.3472 - val_accuracy: 0.9027
Epoch 96/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2057 - accuracy: 0.9244 - val_loss: 0.3543 - val_accuracy: 0.8980
Epoch 97/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2096 - accuracy: 0.9225 - val_loss: 0.3619 - val_accuracy: 0.8957
Epoch 98/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1908 - accuracy: 0.9307 - val_loss: 0.3316 - val_accuracy: 0.9050
Epoch 99/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2007 - accuracy: 0.9270 - val_loss: 0.3295 - val_accuracy: 0.9060
Epoch 100/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1890 - accuracy: 0.9305 - val_loss: 0.3452 - val_accuracy: 0.8990
Epoch 101/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1938 - accuracy: 0.9276 - val_loss: 0.3946 - val_accuracy: 0.8880
Epoch 102/300
181/181 [==============================] - 1s 7ms/step - loss: 0.2019 - accuracy: 0.9259 - val_loss: 0.3246 - val_accuracy: 0.9067
Epoch 103/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1840 - accuracy: 0.9304 - val_loss: 0.3360 - val_accuracy: 0.9027
Epoch 104/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1809 - accuracy: 0.9364 - val_loss: 0.3974 - val_accuracy: 0.8803
Epoch 105/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1802 - accuracy: 0.9326 - val_loss: 0.3364 - val_accuracy: 0.9043
Epoch 106/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1688 - accuracy: 0.9363 - val_loss: 0.3242 - val_accuracy: 0.9087
Epoch 107/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1818 - accuracy: 0.9319 - val_loss: 0.3183 - val_accuracy: 0.9090
Epoch 108/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1655 - accuracy: 0.9381 - val_loss: 0.3265 - val_accuracy: 0.9043
Epoch 109/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1637 - accuracy: 0.9390 - val_loss: 0.3271 - val_accuracy: 0.9103
Epoch 110/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1646 - accuracy: 0.9394 - val_loss: 0.3316 - val_accuracy: 0.9080
Epoch 111/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1719 - accuracy: 0.9376 - val_loss: 0.3700 - val_accuracy: 0.8947
Epoch 112/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1751 - accuracy: 0.9360 - val_loss: 0.3390 - val_accuracy: 0.9057
Epoch 113/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1664 - accuracy: 0.9402 - val_loss: 0.3425 - val_accuracy: 0.9067
Epoch 114/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1560 - accuracy: 0.9434 - val_loss: 0.3399 - val_accuracy: 0.9040
Epoch 115/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1664 - accuracy: 0.9394 - val_loss: 0.3464 - val_accuracy: 0.9013
Epoch 116/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1635 - accuracy: 0.9389 - val_loss: 0.3758 - val_accuracy: 0.8987
Epoch 117/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1528 - accuracy: 0.9471 - val_loss: 0.3505 - val_accuracy: 0.9093
Epoch 118/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1511 - accuracy: 0.9429 - val_loss: 0.3456 - val_accuracy: 0.9050
Epoch 119/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1541 - accuracy: 0.9452 - val_loss: 0.3529 - val_accuracy: 0.9037
Epoch 120/300
181/181 [==============================] - 1s 7ms/step - loss: 0.1473 - accuracy: 0.9460 - val_loss: 0.3488 - val_accuracy: 0.9043
Epoch 121/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1542 - accuracy: 0.9452 - val_loss: 0.3431 - val_accuracy: 0.9103
Epoch 122/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1486 - accuracy: 0.9459 - val_loss: 0.4929 - val_accuracy: 0.8620
Epoch 123/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1520 - accuracy: 0.9436 - val_loss: 0.3878 - val_accuracy: 0.8943
Epoch 124/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1464 - accuracy: 0.9448 - val_loss: 0.3294 - val_accuracy: 0.9160
Epoch 125/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1480 - accuracy: 0.9467 - val_loss: 0.3071 - val_accuracy: 0.9187
Epoch 126/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1516 - accuracy: 0.9459 - val_loss: 0.3749 - val_accuracy: 0.9007
Epoch 127/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1497 - accuracy: 0.9455 - val_loss: 0.3201 - val_accuracy: 0.9147
Epoch 128/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1389 - accuracy: 0.9471 - val_loss: 0.3759 - val_accuracy: 0.9053
Epoch 129/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1447 - accuracy: 0.9454 - val_loss: 0.5813 - val_accuracy: 0.8407
Epoch 130/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1304 - accuracy: 0.9554 - val_loss: 0.3177 - val_accuracy: 0.9163
Epoch 131/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1347 - accuracy: 0.9513 - val_loss: 0.3247 - val_accuracy: 0.9147
Epoch 132/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1353 - accuracy: 0.9522 - val_loss: 0.3363 - val_accuracy: 0.9127
Epoch 133/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1381 - accuracy: 0.9510 - val_loss: 0.3163 - val_accuracy: 0.9187
Epoch 134/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1275 - accuracy: 0.9532 - val_loss: 0.3308 - val_accuracy: 0.9147
Epoch 135/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1239 - accuracy: 0.9558 - val_loss: 0.3470 - val_accuracy: 0.9060
Epoch 136/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1361 - accuracy: 0.9505 - val_loss: 0.4106 - val_accuracy: 0.8927
Epoch 137/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1265 - accuracy: 0.9546 - val_loss: 0.4419 - val_accuracy: 0.8920
Epoch 138/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1271 - accuracy: 0.9529 - val_loss: 0.3257 - val_accuracy: 0.9147
Epoch 139/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1367 - accuracy: 0.9494 - val_loss: 0.3252 - val_accuracy: 0.9150
Epoch 140/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1271 - accuracy: 0.9547 - val_loss: 0.3084 - val_accuracy: 0.9160
Epoch 141/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1177 - accuracy: 0.9576 - val_loss: 0.3243 - val_accuracy: 0.9177
Epoch 142/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1266 - accuracy: 0.9533 - val_loss: 0.3124 - val_accuracy: 0.9167
Epoch 143/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1157 - accuracy: 0.9566 - val_loss: 0.3344 - val_accuracy: 0.9170
Epoch 144/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1200 - accuracy: 0.9556 - val_loss: 0.3422 - val_accuracy: 0.9120
Epoch 145/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1307 - accuracy: 0.9528 - val_loss: 0.3226 - val_accuracy: 0.9103
Epoch 146/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1201 - accuracy: 0.9534 - val_loss: 0.3636 - val_accuracy: 0.9033
Epoch 147/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1179 - accuracy: 0.9557 - val_loss: 0.3183 - val_accuracy: 0.9193
Epoch 148/300
181/181 [==============================] - 2s 11ms/step - loss: 0.1193 - accuracy: 0.9542 - val_loss: 0.3396 - val_accuracy: 0.9087
Epoch 149/300
181/181 [==============================] - 2s 11ms/step - loss: 0.1179 - accuracy: 0.9558 - val_loss: 0.3276 - val_accuracy: 0.9133
Epoch 150/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1135 - accuracy: 0.9576 - val_loss: 0.3080 - val_accuracy: 0.9200
Epoch 151/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1076 - accuracy: 0.9591 - val_loss: 0.3079 - val_accuracy: 0.9140
Epoch 152/300
181/181 [==============================] - 2s 8ms/step - loss: 0.1054 - accuracy: 0.9618 - val_loss: 0.4372 - val_accuracy: 0.8877
Epoch 153/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1028 - accuracy: 0.9601 - val_loss: 0.5367 - val_accuracy: 0.8630
Epoch 154/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1126 - accuracy: 0.9621 - val_loss: 0.3123 - val_accuracy: 0.9220
Epoch 155/300
181/181 [==============================] - 2s 9ms/step - loss: 0.1136 - accuracy: 0.9599 - val_loss: 0.2978 - val_accuracy: 0.9213
Epoch 156/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1017 - accuracy: 0.9607 - val_loss: 0.3016 - val_accuracy: 0.9227
Epoch 157/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1159 - accuracy: 0.9603 - val_loss: 0.3286 - val_accuracy: 0.9163
Epoch 158/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0998 - accuracy: 0.9635 - val_loss: 0.3652 - val_accuracy: 0.9067
Epoch 159/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1092 - accuracy: 0.9618 - val_loss: 0.5465 - val_accuracy: 0.8623
Epoch 160/300
181/181 [==============================] - 2s 11ms/step - loss: 0.1033 - accuracy: 0.9625 - val_loss: 0.3650 - val_accuracy: 0.9070
Epoch 161/300
181/181 [==============================] - 2s 11ms/step - loss: 0.1017 - accuracy: 0.9631 - val_loss: 0.3026 - val_accuracy: 0.9223
Epoch 162/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1199 - accuracy: 0.9579 - val_loss: 0.3390 - val_accuracy: 0.9140
Epoch 163/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0962 - accuracy: 0.9669 - val_loss: 0.3628 - val_accuracy: 0.9100
Epoch 164/300
181/181 [==============================] - 2s 12ms/step - loss: 0.1001 - accuracy: 0.9646 - val_loss: 0.2986 - val_accuracy: 0.9220
Epoch 165/300
181/181 [==============================] - 2s 11ms/step - loss: 0.1023 - accuracy: 0.9618 - val_loss: 0.3059 - val_accuracy: 0.9130
Epoch 166/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1036 - accuracy: 0.9616 - val_loss: 0.5719 - val_accuracy: 0.8667
Epoch 167/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0935 - accuracy: 0.9653 - val_loss: 0.3260 - val_accuracy: 0.9170
Epoch 168/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0985 - accuracy: 0.9653 - val_loss: 0.3109 - val_accuracy: 0.9193
Epoch 169/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0910 - accuracy: 0.9676 - val_loss: 0.3288 - val_accuracy: 0.9160
Epoch 170/300
181/181 [==============================] - 2s 10ms/step - loss: 0.1007 - accuracy: 0.9638 - val_loss: 0.3346 - val_accuracy: 0.9150
Epoch 171/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1000 - accuracy: 0.9646 - val_loss: 0.3229 - val_accuracy: 0.9180
Epoch 172/300
181/181 [==============================] - 1s 8ms/step - loss: 0.1031 - accuracy: 0.9636 - val_loss: 0.3046 - val_accuracy: 0.9193
Epoch 173/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0869 - accuracy: 0.9692 - val_loss: 0.3336 - val_accuracy: 0.9177
Epoch 174/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0969 - accuracy: 0.9660 - val_loss: 0.3093 - val_accuracy: 0.9230
Epoch 175/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0906 - accuracy: 0.9692 - val_loss: 0.4506 - val_accuracy: 0.8933
Epoch 176/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0950 - accuracy: 0.9642 - val_loss: 0.2942 - val_accuracy: 0.9220
Epoch 177/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0884 - accuracy: 0.9692 - val_loss: 0.3255 - val_accuracy: 0.9150
Epoch 178/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0891 - accuracy: 0.9658 - val_loss: 0.3006 - val_accuracy: 0.9257
Epoch 179/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0907 - accuracy: 0.9680 - val_loss: 0.2914 - val_accuracy: 0.9260
Epoch 180/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0902 - accuracy: 0.9662 - val_loss: 0.3213 - val_accuracy: 0.9193
Epoch 181/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0899 - accuracy: 0.9671 - val_loss: 0.3693 - val_accuracy: 0.9063
Epoch 182/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0887 - accuracy: 0.9701 - val_loss: 0.3423 - val_accuracy: 0.9143
Epoch 183/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0967 - accuracy: 0.9641 - val_loss: 0.3022 - val_accuracy: 0.9207
Epoch 184/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0935 - accuracy: 0.9689 - val_loss: 0.2945 - val_accuracy: 0.9267
Epoch 185/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0890 - accuracy: 0.9694 - val_loss: 0.3516 - val_accuracy: 0.9123
Epoch 186/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0959 - accuracy: 0.9674 - val_loss: 0.2974 - val_accuracy: 0.9277
Epoch 187/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0848 - accuracy: 0.9680 - val_loss: 0.3392 - val_accuracy: 0.9133
Epoch 188/300
181/181 [==============================] - 2s 11ms/step - loss: 0.0861 - accuracy: 0.9681 - val_loss: 0.3290 - val_accuracy: 0.9177
Epoch 189/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0929 - accuracy: 0.9672 - val_loss: 0.3532 - val_accuracy: 0.9147
Epoch 190/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0887 - accuracy: 0.9682 - val_loss: 0.3149 - val_accuracy: 0.9237
Epoch 191/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0822 - accuracy: 0.9690 - val_loss: 0.2935 - val_accuracy: 0.9260
Epoch 192/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0923 - accuracy: 0.9672 - val_loss: 0.3629 - val_accuracy: 0.9110
Epoch 193/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0877 - accuracy: 0.9694 - val_loss: 0.3481 - val_accuracy: 0.9110
Epoch 194/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0917 - accuracy: 0.9656 - val_loss: 0.2938 - val_accuracy: 0.9177
Epoch 195/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0812 - accuracy: 0.9719 - val_loss: 0.3192 - val_accuracy: 0.9137
Epoch 196/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0768 - accuracy: 0.9722 - val_loss: 0.2997 - val_accuracy: 0.9233
Epoch 197/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0788 - accuracy: 0.9703 - val_loss: 0.2957 - val_accuracy: 0.9240
Epoch 198/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0806 - accuracy: 0.9702 - val_loss: 0.3021 - val_accuracy: 0.9257
Epoch 199/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0806 - accuracy: 0.9712 - val_loss: 0.3356 - val_accuracy: 0.9193
Epoch 200/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0820 - accuracy: 0.9705 - val_loss: 0.3084 - val_accuracy: 0.9220
Epoch 201/300
181/181 [==============================] - 3s 17ms/step - loss: 0.0815 - accuracy: 0.9697 - val_loss: 0.2811 - val_accuracy: 0.9267
Epoch 202/300
181/181 [==============================] - 3s 16ms/step - loss: 0.0860 - accuracy: 0.9686 - val_loss: 0.3294 - val_accuracy: 0.9197
Epoch 203/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0773 - accuracy: 0.9710 - val_loss: 0.3285 - val_accuracy: 0.9130
Epoch 204/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0775 - accuracy: 0.9736 - val_loss: 0.3270 - val_accuracy: 0.9200
Epoch 205/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0805 - accuracy: 0.9693 - val_loss: 0.3356 - val_accuracy: 0.9133
Epoch 206/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0738 - accuracy: 0.9740 - val_loss: 0.2910 - val_accuracy: 0.9217
Epoch 207/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0744 - accuracy: 0.9739 - val_loss: 0.3136 - val_accuracy: 0.9240
Epoch 208/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0768 - accuracy: 0.9723 - val_loss: 0.7566 - val_accuracy: 0.8337
Epoch 209/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0822 - accuracy: 0.9684 - val_loss: 0.3277 - val_accuracy: 0.9137
Epoch 210/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0694 - accuracy: 0.9745 - val_loss: 0.3263 - val_accuracy: 0.9233
Epoch 211/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0667 - accuracy: 0.9749 - val_loss: 0.2968 - val_accuracy: 0.9273
Epoch 212/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0813 - accuracy: 0.9697 - val_loss: 0.3652 - val_accuracy: 0.9173
Epoch 213/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0770 - accuracy: 0.9709 - val_loss: 0.3573 - val_accuracy: 0.9167
Epoch 214/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0730 - accuracy: 0.9731 - val_loss: 0.3968 - val_accuracy: 0.9023
Epoch 215/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0750 - accuracy: 0.9725 - val_loss: 0.3082 - val_accuracy: 0.9207
Epoch 216/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0754 - accuracy: 0.9734 - val_loss: 0.3429 - val_accuracy: 0.9143
Epoch 217/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0723 - accuracy: 0.9748 - val_loss: 0.3596 - val_accuracy: 0.9107
Epoch 218/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0716 - accuracy: 0.9722 - val_loss: 0.3280 - val_accuracy: 0.9173
Epoch 219/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0746 - accuracy: 0.9715 - val_loss: 0.3555 - val_accuracy: 0.9157
Epoch 220/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0747 - accuracy: 0.9733 - val_loss: 0.3476 - val_accuracy: 0.9097
Epoch 221/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0709 - accuracy: 0.9748 - val_loss: 0.3069 - val_accuracy: 0.9263
Epoch 222/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0758 - accuracy: 0.9739 - val_loss: 0.3068 - val_accuracy: 0.9263
Epoch 223/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0699 - accuracy: 0.9729 - val_loss: 0.2960 - val_accuracy: 0.9277
Epoch 224/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0726 - accuracy: 0.9742 - val_loss: 0.2954 - val_accuracy: 0.9250
Epoch 225/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0646 - accuracy: 0.9766 - val_loss: 0.3254 - val_accuracy: 0.9197
Epoch 226/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0625 - accuracy: 0.9767 - val_loss: 0.3111 - val_accuracy: 0.9243
Epoch 227/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0664 - accuracy: 0.9765 - val_loss: 0.3439 - val_accuracy: 0.9183
Epoch 228/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0705 - accuracy: 0.9741 - val_loss: 0.3087 - val_accuracy: 0.9237
Epoch 229/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0746 - accuracy: 0.9720 - val_loss: 0.3110 - val_accuracy: 0.9210
Epoch 230/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0740 - accuracy: 0.9748 - val_loss: 0.2746 - val_accuracy: 0.9323
Epoch 231/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0718 - accuracy: 0.9741 - val_loss: 0.2925 - val_accuracy: 0.9277
Epoch 232/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0620 - accuracy: 0.9767 - val_loss: 0.3592 - val_accuracy: 0.9163
Epoch 233/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0647 - accuracy: 0.9761 - val_loss: 0.3344 - val_accuracy: 0.9203
Epoch 234/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0630 - accuracy: 0.9784 - val_loss: 0.3121 - val_accuracy: 0.9233
Epoch 235/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0707 - accuracy: 0.9760 - val_loss: 0.2928 - val_accuracy: 0.9293
Epoch 236/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0643 - accuracy: 0.9790 - val_loss: 0.2971 - val_accuracy: 0.9267
Epoch 237/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0584 - accuracy: 0.9769 - val_loss: 0.2933 - val_accuracy: 0.9287
Epoch 238/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0660 - accuracy: 0.9756 - val_loss: 0.3641 - val_accuracy: 0.9153
Epoch 239/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0600 - accuracy: 0.9792 - val_loss: 0.3101 - val_accuracy: 0.9253
Epoch 240/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0666 - accuracy: 0.9763 - val_loss: 0.3496 - val_accuracy: 0.9170
Epoch 241/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0611 - accuracy: 0.9770 - val_loss: 0.3393 - val_accuracy: 0.9210
Epoch 242/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0614 - accuracy: 0.9767 - val_loss: 0.3634 - val_accuracy: 0.9180
Epoch 243/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0658 - accuracy: 0.9785 - val_loss: 0.3092 - val_accuracy: 0.9253
Epoch 244/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0681 - accuracy: 0.9763 - val_loss: 0.3992 - val_accuracy: 0.9067
Epoch 245/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0590 - accuracy: 0.9780 - val_loss: 0.4400 - val_accuracy: 0.8973
Epoch 246/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0650 - accuracy: 0.9753 - val_loss: 0.3428 - val_accuracy: 0.9210
Epoch 247/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0619 - accuracy: 0.9786 - val_loss: 0.2926 - val_accuracy: 0.9333
Epoch 248/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0671 - accuracy: 0.9745 - val_loss: 0.3123 - val_accuracy: 0.9267
Epoch 249/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0579 - accuracy: 0.9789 - val_loss: 0.3362 - val_accuracy: 0.9143
Epoch 250/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0624 - accuracy: 0.9774 - val_loss: 0.3625 - val_accuracy: 0.9177
Epoch 251/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0668 - accuracy: 0.9771 - val_loss: 0.3229 - val_accuracy: 0.9263
Epoch 252/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0587 - accuracy: 0.9790 - val_loss: 0.3016 - val_accuracy: 0.9270
Epoch 253/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0628 - accuracy: 0.9771 - val_loss: 0.2968 - val_accuracy: 0.9297
Epoch 254/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0578 - accuracy: 0.9791 - val_loss: 0.6094 - val_accuracy: 0.8793
Epoch 255/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0653 - accuracy: 0.9784 - val_loss: 0.3079 - val_accuracy: 0.9267
Epoch 256/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0574 - accuracy: 0.9793 - val_loss: 0.2879 - val_accuracy: 0.9310
Epoch 257/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0621 - accuracy: 0.9777 - val_loss: 0.3316 - val_accuracy: 0.9193
Epoch 258/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0595 - accuracy: 0.9789 - val_loss: 0.3240 - val_accuracy: 0.9240
Epoch 259/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0656 - accuracy: 0.9758 - val_loss: 0.3346 - val_accuracy: 0.9190
Epoch 260/300
181/181 [==============================] - 2s 10ms/step - loss: 0.0587 - accuracy: 0.9785 - val_loss: 0.4449 - val_accuracy: 0.9003
Epoch 261/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0544 - accuracy: 0.9811 - val_loss: 0.3170 - val_accuracy: 0.9267
Epoch 262/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0608 - accuracy: 0.9793 - val_loss: 0.3867 - val_accuracy: 0.9137
Epoch 263/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0629 - accuracy: 0.9770 - val_loss: 0.3981 - val_accuracy: 0.9083
Epoch 264/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0518 - accuracy: 0.9808 - val_loss: 0.3244 - val_accuracy: 0.9280
Epoch 265/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0643 - accuracy: 0.9752 - val_loss: 0.3163 - val_accuracy: 0.9260
Epoch 266/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0544 - accuracy: 0.9792 - val_loss: 0.3425 - val_accuracy: 0.9177
Epoch 267/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0533 - accuracy: 0.9796 - val_loss: 0.3305 - val_accuracy: 0.9220
Epoch 268/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0507 - accuracy: 0.9795 - val_loss: 0.2897 - val_accuracy: 0.9320
Epoch 269/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0541 - accuracy: 0.9815 - val_loss: 0.3775 - val_accuracy: 0.9167
Epoch 270/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0605 - accuracy: 0.9777 - val_loss: 0.3995 - val_accuracy: 0.9097
Epoch 271/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0571 - accuracy: 0.9795 - val_loss: 0.2785 - val_accuracy: 0.9327
Epoch 272/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0585 - accuracy: 0.9789 - val_loss: 0.3104 - val_accuracy: 0.9303
Epoch 273/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0533 - accuracy: 0.9822 - val_loss: 0.4265 - val_accuracy: 0.9010
Epoch 274/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0491 - accuracy: 0.9834 - val_loss: 0.3251 - val_accuracy: 0.9223
Epoch 275/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0658 - accuracy: 0.9792 - val_loss: 0.2923 - val_accuracy: 0.9350
Epoch 276/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0517 - accuracy: 0.9816 - val_loss: 0.3034 - val_accuracy: 0.9273
Epoch 277/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0573 - accuracy: 0.9807 - val_loss: 0.2901 - val_accuracy: 0.9307
Epoch 278/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0551 - accuracy: 0.9800 - val_loss: 0.2868 - val_accuracy: 0.9290
Epoch 279/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0553 - accuracy: 0.9797 - val_loss: 0.3060 - val_accuracy: 0.9297
Epoch 280/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0625 - accuracy: 0.9774 - val_loss: 0.3595 - val_accuracy: 0.9173
Epoch 281/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0561 - accuracy: 0.9792 - val_loss: 0.3444 - val_accuracy: 0.9243
Epoch 282/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0593 - accuracy: 0.9772 - val_loss: 0.3419 - val_accuracy: 0.9223
Epoch 283/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0552 - accuracy: 0.9810 - val_loss: 0.3364 - val_accuracy: 0.9227
Epoch 284/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0568 - accuracy: 0.9793 - val_loss: 0.2900 - val_accuracy: 0.9323
Epoch 285/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0497 - accuracy: 0.9823 - val_loss: 0.3956 - val_accuracy: 0.9100
Epoch 286/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0482 - accuracy: 0.9824 - val_loss: 0.3412 - val_accuracy: 0.9207
Epoch 287/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0524 - accuracy: 0.9814 - val_loss: 0.3492 - val_accuracy: 0.9217
Epoch 288/300
181/181 [==============================] - 1s 7ms/step - loss: 0.0536 - accuracy: 0.9801 - val_loss: 0.3324 - val_accuracy: 0.9240
Epoch 289/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0509 - accuracy: 0.9815 - val_loss: 0.3166 - val_accuracy: 0.9277
Epoch 290/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0479 - accuracy: 0.9835 - val_loss: 0.3307 - val_accuracy: 0.9233
Epoch 291/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0563 - accuracy: 0.9804 - val_loss: 0.3155 - val_accuracy: 0.9233
Epoch 292/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0446 - accuracy: 0.9824 - val_loss: 0.3024 - val_accuracy: 0.9293
Epoch 293/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0522 - accuracy: 0.9823 - val_loss: 0.5212 - val_accuracy: 0.8810
Epoch 294/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0561 - accuracy: 0.9806 - val_loss: 0.3369 - val_accuracy: 0.9187
Epoch 295/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0524 - accuracy: 0.9823 - val_loss: 0.3524 - val_accuracy: 0.9207
Epoch 296/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0539 - accuracy: 0.9817 - val_loss: 0.2912 - val_accuracy: 0.9317
Epoch 297/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0483 - accuracy: 0.9834 - val_loss: 0.2998 - val_accuracy: 0.9267
Epoch 298/300
181/181 [==============================] - 2s 9ms/step - loss: 0.0509 - accuracy: 0.9811 - val_loss: 0.3117 - val_accuracy: 0.9293
Epoch 299/300
181/181 [==============================] - 1s 8ms/step - loss: 0.0483 - accuracy: 0.9825 - val_loss: 0.3358 - val_accuracy: 0.9267
Epoch 300/300
181/181 [==============================] - 2s 8ms/step - loss: 0.0485 - accuracy: 0.9817 - val_loss: 0.2857 - val_accuracy: 0.9303
In [29]:
# Training vs. validation loss across all 300 epochs for the 37x37 model.
fig, ax = plt.subplots()
ax.plot(history37.history["loss"])
ax.plot(history37.history["val_loss"])
ax.set_title('Model loss')
ax.set_ylabel('Loss')
ax.set_xlabel('Epoch')
ax.legend(['Train', 'Validation'], loc='upper right')
plt.show()
No description has been provided for this image

From the model loss graph, there is no overfitting of the model onto the training data.

In [30]:
# Training vs. validation accuracy across all 300 epochs for the 37x37 model.
plt.figure()
plt.plot(history37.history["accuracy"])
plt.plot(history37.history["val_accuracy"])
plt.title('Model Accuracy')
# Fixed: this axis shows accuracy, not loss (was mislabeled 'Loss').
plt.ylabel('Accuracy')
plt.xlabel('Epoch')
plt.legend(['Train', 'Validation'], loc='upper right')
plt.show()
No description has been provided for this image
In [31]:
# Persist the trained 37x37 model (architecture + optimizer state + weights)
# in the legacy HDF5 format; this file is reloaded by the evaluation cell below.
model37.save('model37.h5')
# NOTE(review): save() above already stores the weights, so this separate
# weights file is redundant — kept because the next cell loads it explicitly.
model37.save_weights('model37_weights.h5')
In [32]:
# Reload the saved 37x37 model and evaluate it on the held-out test set:
# classification report via confusion matrix heatmap + architecture diagram.
from tensorflow.keras.models import load_model
# Fixed: keras.utils.vis_utils is a deprecated import path; plot_model lives
# in tensorflow.keras.utils in current TF releases.
from tensorflow.keras.utils import plot_model
from sklearn.metrics import confusion_matrix
import seaborn as sns
import pandas as pd

model37 = load_model('model37.h5')

# NOTE(review): load_model() already restores the weights, so this is
# redundant — kept to mirror the separately saved weights file.
model37.load_weights('model37_weights.h5')

model37.summary()

# Class probabilities for every test image; argmax converts the one-hot
# labels / probability vectors back to integer class indices.
y_pred = model37.predict(X_test37)

# Fixed: the original computed confusion_matrix() twice, discarding the
# first (mid-cell) result; compute it once.
cm = confusion_matrix(np.argmax(y_test37, axis=1), np.argmax(y_pred, axis=1))

pd.options.display.float_format = '{:.2f}'.format

# Label rows/columns with the human-readable class names from the dataset.
df_cm = pd.DataFrame(cm, index=list(test_dataset_37.class_names),
                     columns=list(test_dataset_37.class_names))

loss, accuracy = model37.evaluate(X_test37, y_test37)

plt.figure(figsize=(10, 7))
sns.heatmap(df_cm, annot=True, fmt='d')
plt.title(f'Image size 37\nLoss: {loss:.3f}, Accuracy: {accuracy:.3f}')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.show()

# Render the layer graph (shown as the cell's Out[] value).
plot_model(model37, show_shapes=True, show_layer_names=True, show_layer_activations=True, expand_nested=True)
Model: "sequential_1"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_3 (Conv2D)           (None, 35, 35, 32)        320       
                                                                 
 batch_normalization_3 (Batc  (None, 35, 35, 32)       128       
 hNormalization)                                                 
                                                                 
 max_pooling2d_2 (MaxPooling  (None, 17, 17, 32)       0         
 2D)                                                             
                                                                 
 conv2d_4 (Conv2D)           (None, 15, 15, 64)        18496     
                                                                 
 batch_normalization_4 (Batc  (None, 15, 15, 64)       256       
 hNormalization)                                                 
                                                                 
 dropout_4 (Dropout)         (None, 15, 15, 64)        0         
                                                                 
 max_pooling2d_3 (MaxPooling  (None, 7, 7, 64)         0         
 2D)                                                             
                                                                 
 conv2d_5 (Conv2D)           (None, 5, 5, 64)          36928     
                                                                 
 batch_normalization_5 (Batc  (None, 5, 5, 64)         256       
 hNormalization)                                                 
                                                                 
 dropout_5 (Dropout)         (None, 5, 5, 64)          0         
                                                                 
 flatten_1 (Flatten)         (None, 1600)              0         
                                                                 
 dense_3 (Dense)             (None, 100)               160100    
                                                                 
 dropout_6 (Dropout)         (None, 100)               0         
                                                                 
 dense_4 (Dense)             (None, 50)                5050      
                                                                 
 dropout_7 (Dropout)         (None, 50)                0         
                                                                 
 dense_5 (Dense)             (None, 15)                765       
                                                                 
=================================================================
Total params: 222,299
Trainable params: 221,979
Non-trainable params: 320
_________________________________________________________________
94/94 [==============================] - 1s 5ms/step
94/94 [==============================] - 0s 3ms/step - loss: 0.3051 - accuracy: 0.9280
No description has been provided for this image
Out[32]:
No description has been provided for this image

The 37x37 Model has a loss of 0.305 and an accuracy of 92.8% when tested on the testing data.

From the confusion matrix, it can be seen that the model struggles to classify cabbages: 17 cabbages were misclassified as cauliflower.

Creating the model for 131x131 images¶

In [33]:
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, Flatten, MaxPooling2D, Dropout, BatchNormalization
In [34]:
# fix random seed for reproducibility
seed = 88
# NOTE(review): this seeds only NumPy's global RNG. TensorFlow's own RNG
# (weight initialization, dataset shuffling) is not seeded here — add
# tf.random.set_seed(seed) if full run-to-run reproducibility is intended.
np.random.seed(seed)
In [36]:
# CNN for 131x131 grayscale images: four Conv/BatchNorm stages with
# doubling filter counts (32 -> 64 -> 128 -> 256), global average pooling
# instead of Flatten, then a small dense head over the 15 classes.
from tensorflow.keras.optimizers import Adam
from tensorflow.keras.layers import Conv2D, BatchNormalization, MaxPooling2D, Dropout, GlobalAveragePooling2D, Dense
from tensorflow.keras.models import Sequential

model131 = Sequential([
    # Stage 1
    Conv2D(32, (3, 3), activation='relu', input_shape=(131, 131, 1)),
    BatchNormalization(),
    MaxPooling2D((2, 2)),

    # Stage 2
    Conv2D(64, (3, 3), activation='relu'),
    BatchNormalization(),
    MaxPooling2D((2, 2)),

    # Stage 3 (dropout added as depth grows)
    Conv2D(128, (3, 3), activation='relu'),
    BatchNormalization(),
    Dropout(0.3),
    MaxPooling2D((2, 2)),

    # Stage 4
    Conv2D(256, (3, 3), activation='relu'),
    BatchNormalization(),
    Dropout(0.4),
    MaxPooling2D((2, 2)),

    # Collapse each 256-channel feature map to a single value.
    GlobalAveragePooling2D(),

    # Classifier head.
    Dense(100, activation='relu'),
    Dropout(0.3),
    Dense(50, activation='relu'),
    Dropout(0.2),
    Dense(15, activation='softmax'),
])

# Very small learning rate, matching the original training setup.
optimizer = Adam(learning_rate=0.00002)

model131.compile(optimizer=optimizer,
              loss='categorical_crossentropy',
              metrics=['accuracy'])

model131.summary()
Model: "sequential_3"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_9 (Conv2D)           (None, 129, 129, 32)      320       
                                                                 
 batch_normalization_9 (Batc  (None, 129, 129, 32)     128       
 hNormalization)                                                 
                                                                 
 max_pooling2d_6 (MaxPooling  (None, 64, 64, 32)       0         
 2D)                                                             
                                                                 
 conv2d_10 (Conv2D)          (None, 62, 62, 64)        18496     
                                                                 
 batch_normalization_10 (Bat  (None, 62, 62, 64)       256       
 chNormalization)                                                
                                                                 
 max_pooling2d_7 (MaxPooling  (None, 31, 31, 64)       0         
 2D)                                                             
                                                                 
 conv2d_11 (Conv2D)          (None, 29, 29, 128)       73856     
                                                                 
 batch_normalization_11 (Bat  (None, 29, 29, 128)      512       
 chNormalization)                                                
                                                                 
 dropout_12 (Dropout)        (None, 29, 29, 128)       0         
                                                                 
 max_pooling2d_8 (MaxPooling  (None, 14, 14, 128)      0         
 2D)                                                             
                                                                 
 conv2d_12 (Conv2D)          (None, 12, 12, 256)       295168    
                                                                 
 batch_normalization_12 (Bat  (None, 12, 12, 256)      1024      
 chNormalization)                                                
                                                                 
 dropout_13 (Dropout)        (None, 12, 12, 256)       0         
                                                                 
 max_pooling2d_9 (MaxPooling  (None, 6, 6, 256)        0         
 2D)                                                             
                                                                 
 global_average_pooling2d (G  (None, 256)              0         
 lobalAveragePooling2D)                                          
                                                                 
 dense_9 (Dense)             (None, 100)               25700     
                                                                 
 dropout_14 (Dropout)        (None, 100)               0         
                                                                 
 dense_10 (Dense)            (None, 50)                5050      
                                                                 
 dropout_15 (Dropout)        (None, 50)                0         
                                                                 
 dense_11 (Dense)            (None, 15)                765       
                                                                 
=================================================================
Total params: 421,275
Trainable params: 420,315
Non-trainable params: 960
_________________________________________________________________

To improve the model, I added Dropout to reduce overfitting, BatchNormalization between layers to stabilize the distributions of layer inputs, and MaxPooling2D to downsample the feature maps while retaining the most important information.

More layers were used and there are more neurons compared to the 37x37 model as there is more data to train on.

The learning rate of Adam was set to 0.00002 to help reduce volatility in the loss and accuracy scores.

This model has a total of 420,315 trainable parameters.

In [37]:
history131 = model131.fit(X_train131, y_train131, validation_data=(X_validate131, y_validate131), epochs=250, batch_size=50, verbose=1, class_weight=class_weights)
Epoch 1/250
181/181 [==============================] - 15s 72ms/step - loss: 2.9153 - accuracy: 0.1256 - val_loss: 2.5867 - val_accuracy: 0.1423
Epoch 2/250
181/181 [==============================] - 12s 65ms/step - loss: 2.4182 - accuracy: 0.2059 - val_loss: 2.2730 - val_accuracy: 0.3317
Epoch 3/250
181/181 [==============================] - 12s 65ms/step - loss: 2.2032 - accuracy: 0.2864 - val_loss: 2.0299 - val_accuracy: 0.4260
Epoch 4/250
181/181 [==============================] - 12s 67ms/step - loss: 2.0492 - accuracy: 0.3475 - val_loss: 1.9246 - val_accuracy: 0.4677
Epoch 5/250
181/181 [==============================] - 12s 66ms/step - loss: 1.9394 - accuracy: 0.3841 - val_loss: 1.8143 - val_accuracy: 0.4870
Epoch 6/250
181/181 [==============================] - 12s 66ms/step - loss: 1.8540 - accuracy: 0.4161 - val_loss: 1.7413 - val_accuracy: 0.5080
Epoch 7/250
181/181 [==============================] - 12s 66ms/step - loss: 1.7551 - accuracy: 0.4432 - val_loss: 1.6793 - val_accuracy: 0.5220
Epoch 8/250
181/181 [==============================] - 12s 67ms/step - loss: 1.6883 - accuracy: 0.4622 - val_loss: 1.6284 - val_accuracy: 0.5287
Epoch 9/250
181/181 [==============================] - 12s 65ms/step - loss: 1.6256 - accuracy: 0.4895 - val_loss: 1.5315 - val_accuracy: 0.5537
Epoch 10/250
181/181 [==============================] - 12s 66ms/step - loss: 1.5770 - accuracy: 0.5042 - val_loss: 1.5185 - val_accuracy: 0.5707
Epoch 11/250
181/181 [==============================] - 12s 66ms/step - loss: 1.5039 - accuracy: 0.5261 - val_loss: 1.4792 - val_accuracy: 0.5803
Epoch 12/250
181/181 [==============================] - 12s 65ms/step - loss: 1.4671 - accuracy: 0.5362 - val_loss: 1.4178 - val_accuracy: 0.5913
Epoch 13/250
181/181 [==============================] - 12s 65ms/step - loss: 1.4161 - accuracy: 0.5528 - val_loss: 1.3908 - val_accuracy: 0.5843
Epoch 14/250
181/181 [==============================] - 12s 65ms/step - loss: 1.3772 - accuracy: 0.5621 - val_loss: 1.3489 - val_accuracy: 0.6063
Epoch 15/250
181/181 [==============================] - 12s 65ms/step - loss: 1.3212 - accuracy: 0.5917 - val_loss: 1.2876 - val_accuracy: 0.6197
Epoch 16/250
181/181 [==============================] - 12s 66ms/step - loss: 1.2954 - accuracy: 0.5937 - val_loss: 1.2895 - val_accuracy: 0.6270
Epoch 17/250
181/181 [==============================] - 12s 65ms/step - loss: 1.2585 - accuracy: 0.6068 - val_loss: 1.1573 - val_accuracy: 0.6713
Epoch 18/250
181/181 [==============================] - 12s 66ms/step - loss: 1.2283 - accuracy: 0.6180 - val_loss: 1.1430 - val_accuracy: 0.6723
Epoch 19/250
181/181 [==============================] - 12s 67ms/step - loss: 1.1682 - accuracy: 0.6391 - val_loss: 1.1094 - val_accuracy: 0.6790
Epoch 20/250
181/181 [==============================] - 12s 66ms/step - loss: 1.1388 - accuracy: 0.6467 - val_loss: 1.1462 - val_accuracy: 0.6647
Epoch 21/250
181/181 [==============================] - 12s 66ms/step - loss: 1.1120 - accuracy: 0.6543 - val_loss: 1.1116 - val_accuracy: 0.6780
Epoch 22/250
181/181 [==============================] - 12s 66ms/step - loss: 1.0631 - accuracy: 0.6680 - val_loss: 1.0428 - val_accuracy: 0.6873
Epoch 23/250
181/181 [==============================] - 12s 66ms/step - loss: 1.0412 - accuracy: 0.6769 - val_loss: 0.9997 - val_accuracy: 0.7087
Epoch 24/250
181/181 [==============================] - 12s 66ms/step - loss: 1.0010 - accuracy: 0.6941 - val_loss: 0.9536 - val_accuracy: 0.7200
Epoch 25/250
181/181 [==============================] - 12s 66ms/step - loss: 0.9839 - accuracy: 0.6981 - val_loss: 0.9921 - val_accuracy: 0.7027
Epoch 26/250
181/181 [==============================] - 12s 66ms/step - loss: 0.9543 - accuracy: 0.7036 - val_loss: 1.0071 - val_accuracy: 0.7067
Epoch 27/250
181/181 [==============================] - 12s 66ms/step - loss: 0.9427 - accuracy: 0.7097 - val_loss: 0.8660 - val_accuracy: 0.7430
Epoch 28/250
181/181 [==============================] - 12s 67ms/step - loss: 0.9155 - accuracy: 0.7213 - val_loss: 0.8247 - val_accuracy: 0.7533
Epoch 29/250
181/181 [==============================] - 12s 66ms/step - loss: 0.8892 - accuracy: 0.7253 - val_loss: 0.9258 - val_accuracy: 0.7283
Epoch 30/250
181/181 [==============================] - 12s 66ms/step - loss: 0.8706 - accuracy: 0.7398 - val_loss: 0.8443 - val_accuracy: 0.7470
Epoch 31/250
181/181 [==============================] - 12s 66ms/step - loss: 0.8287 - accuracy: 0.7452 - val_loss: 0.8297 - val_accuracy: 0.7587
Epoch 32/250
181/181 [==============================] - 12s 66ms/step - loss: 0.8206 - accuracy: 0.7552 - val_loss: 0.8875 - val_accuracy: 0.7343
Epoch 33/250
181/181 [==============================] - 12s 66ms/step - loss: 0.7998 - accuracy: 0.7580 - val_loss: 0.8938 - val_accuracy: 0.7453
Epoch 34/250
181/181 [==============================] - 12s 66ms/step - loss: 0.7808 - accuracy: 0.7599 - val_loss: 0.8555 - val_accuracy: 0.7500
Epoch 35/250
181/181 [==============================] - 12s 66ms/step - loss: 0.7479 - accuracy: 0.7693 - val_loss: 0.8419 - val_accuracy: 0.7583
Epoch 36/250
181/181 [==============================] - 12s 65ms/step - loss: 0.7343 - accuracy: 0.7788 - val_loss: 0.9190 - val_accuracy: 0.7463
Epoch 37/250
181/181 [==============================] - 12s 66ms/step - loss: 0.7091 - accuracy: 0.7838 - val_loss: 0.8643 - val_accuracy: 0.7567
Epoch 38/250
181/181 [==============================] - 12s 66ms/step - loss: 0.6878 - accuracy: 0.7914 - val_loss: 0.8292 - val_accuracy: 0.7717
Epoch 39/250
181/181 [==============================] - 12s 66ms/step - loss: 0.6883 - accuracy: 0.7952 - val_loss: 0.8440 - val_accuracy: 0.7637
Epoch 40/250
181/181 [==============================] - 12s 66ms/step - loss: 0.6618 - accuracy: 0.7983 - val_loss: 0.7769 - val_accuracy: 0.7757
Epoch 41/250
181/181 [==============================] - 12s 66ms/step - loss: 0.6464 - accuracy: 0.8064 - val_loss: 0.7341 - val_accuracy: 0.7963
Epoch 42/250
181/181 [==============================] - 12s 66ms/step - loss: 0.6209 - accuracy: 0.8129 - val_loss: 0.6522 - val_accuracy: 0.8070
Epoch 43/250
181/181 [==============================] - 12s 66ms/step - loss: 0.6115 - accuracy: 0.8161 - val_loss: 0.6843 - val_accuracy: 0.8073
Epoch 44/250
181/181 [==============================] - 12s 66ms/step - loss: 0.5834 - accuracy: 0.8209 - val_loss: 0.6176 - val_accuracy: 0.8170
Epoch 45/250
181/181 [==============================] - 12s 66ms/step - loss: 0.5970 - accuracy: 0.8181 - val_loss: 0.8373 - val_accuracy: 0.7770
Epoch 46/250
181/181 [==============================] - 12s 66ms/step - loss: 0.5758 - accuracy: 0.8264 - val_loss: 0.7918 - val_accuracy: 0.7913
Epoch 47/250
181/181 [==============================] - 12s 66ms/step - loss: 0.5616 - accuracy: 0.8285 - val_loss: 0.7401 - val_accuracy: 0.8030
Epoch 48/250
181/181 [==============================] - 12s 66ms/step - loss: 0.5425 - accuracy: 0.8345 - val_loss: 0.6872 - val_accuracy: 0.8080
Epoch 49/250
181/181 [==============================] - 13s 74ms/step - loss: 0.5221 - accuracy: 0.8427 - val_loss: 0.6504 - val_accuracy: 0.8177
Epoch 50/250
181/181 [==============================] - 13s 74ms/step - loss: 0.5261 - accuracy: 0.8408 - val_loss: 0.6427 - val_accuracy: 0.8197
Epoch 51/250
181/181 [==============================] - 14s 75ms/step - loss: 0.5065 - accuracy: 0.8510 - val_loss: 0.6651 - val_accuracy: 0.8047
Epoch 52/250
181/181 [==============================] - 13s 70ms/step - loss: 0.4872 - accuracy: 0.8514 - val_loss: 0.6154 - val_accuracy: 0.8257
Epoch 53/250
181/181 [==============================] - 13s 69ms/step - loss: 0.4883 - accuracy: 0.8540 - val_loss: 0.8877 - val_accuracy: 0.7723
Epoch 54/250
181/181 [==============================] - 14s 76ms/step - loss: 0.4751 - accuracy: 0.8561 - val_loss: 0.6118 - val_accuracy: 0.8280
Epoch 55/250
181/181 [==============================] - 14s 76ms/step - loss: 0.4704 - accuracy: 0.8594 - val_loss: 0.6270 - val_accuracy: 0.8257
Epoch 56/250
181/181 [==============================] - 12s 67ms/step - loss: 0.4511 - accuracy: 0.8594 - val_loss: 0.6817 - val_accuracy: 0.8207
Epoch 57/250
181/181 [==============================] - 12s 68ms/step - loss: 0.4444 - accuracy: 0.8622 - val_loss: 0.7208 - val_accuracy: 0.8050
Epoch 58/250
181/181 [==============================] - 12s 68ms/step - loss: 0.4284 - accuracy: 0.8730 - val_loss: 0.5406 - val_accuracy: 0.8427
Epoch 59/250
181/181 [==============================] - 12s 67ms/step - loss: 0.4194 - accuracy: 0.8725 - val_loss: 0.5852 - val_accuracy: 0.8333
Epoch 60/250
181/181 [==============================] - 12s 66ms/step - loss: 0.4089 - accuracy: 0.8761 - val_loss: 0.4864 - val_accuracy: 0.8587
Epoch 61/250
181/181 [==============================] - 12s 66ms/step - loss: 0.4009 - accuracy: 0.8801 - val_loss: 0.6067 - val_accuracy: 0.8293
Epoch 62/250
181/181 [==============================] - 12s 66ms/step - loss: 0.4111 - accuracy: 0.8764 - val_loss: 0.6105 - val_accuracy: 0.8380
Epoch 63/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3670 - accuracy: 0.8922 - val_loss: 0.5486 - val_accuracy: 0.8377
Epoch 64/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3770 - accuracy: 0.8872 - val_loss: 0.6101 - val_accuracy: 0.8400
Epoch 65/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3715 - accuracy: 0.8915 - val_loss: 0.5351 - val_accuracy: 0.8433
Epoch 66/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3576 - accuracy: 0.8922 - val_loss: 0.6309 - val_accuracy: 0.8267
Epoch 67/250
181/181 [==============================] - 12s 67ms/step - loss: 0.3619 - accuracy: 0.8917 - val_loss: 0.4872 - val_accuracy: 0.8600
Epoch 68/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3407 - accuracy: 0.8999 - val_loss: 0.7035 - val_accuracy: 0.8177
Epoch 69/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3439 - accuracy: 0.8983 - val_loss: 0.4507 - val_accuracy: 0.8717
Epoch 70/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3472 - accuracy: 0.8974 - val_loss: 0.5648 - val_accuracy: 0.8370
Epoch 71/250
181/181 [==============================] - 12s 67ms/step - loss: 0.3421 - accuracy: 0.9008 - val_loss: 0.5573 - val_accuracy: 0.8467
Epoch 72/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3224 - accuracy: 0.9052 - val_loss: 0.7721 - val_accuracy: 0.8133
Epoch 73/250
181/181 [==============================] - 12s 67ms/step - loss: 0.3001 - accuracy: 0.9120 - val_loss: 0.6911 - val_accuracy: 0.8240
Epoch 74/250
181/181 [==============================] - 12s 67ms/step - loss: 0.3045 - accuracy: 0.9095 - val_loss: 0.7951 - val_accuracy: 0.8137
Epoch 75/250
181/181 [==============================] - 12s 66ms/step - loss: 0.3089 - accuracy: 0.9069 - val_loss: 0.5939 - val_accuracy: 0.8487
Epoch 76/250
181/181 [==============================] - 12s 66ms/step - loss: 0.2833 - accuracy: 0.9177 - val_loss: 0.5346 - val_accuracy: 0.8633
Epoch 77/250
181/181 [==============================] - 12s 66ms/step - loss: 0.2844 - accuracy: 0.9180 - val_loss: 0.7947 - val_accuracy: 0.8093
Epoch 78/250
181/181 [==============================] - 12s 66ms/step - loss: 0.2812 - accuracy: 0.9104 - val_loss: 0.5027 - val_accuracy: 0.8717
Epoch 79/250
181/181 [==============================] - 12s 66ms/step - loss: 0.2733 - accuracy: 0.9187 - val_loss: 0.4787 - val_accuracy: 0.8670
Epoch 80/250
181/181 [==============================] - 12s 67ms/step - loss: 0.2796 - accuracy: 0.9176 - val_loss: 0.4891 - val_accuracy: 0.8717
Epoch 81/250
181/181 [==============================] - 12s 65ms/step - loss: 0.2646 - accuracy: 0.9215 - val_loss: 0.5059 - val_accuracy: 0.8687
Epoch 82/250
181/181 [==============================] - 12s 65ms/step - loss: 0.2598 - accuracy: 0.9228 - val_loss: 0.7155 - val_accuracy: 0.8343
Epoch 83/250
181/181 [==============================] - 12s 65ms/step - loss: 0.2610 - accuracy: 0.9202 - val_loss: 0.5429 - val_accuracy: 0.8693
Epoch 84/250
181/181 [==============================] - 12s 66ms/step - loss: 0.2500 - accuracy: 0.9246 - val_loss: 0.4672 - val_accuracy: 0.8787
Epoch 85/250
181/181 [==============================] - 12s 68ms/step - loss: 0.2569 - accuracy: 0.9245 - val_loss: 0.5181 - val_accuracy: 0.8657
Epoch 86/250
181/181 [==============================] - 12s 67ms/step - loss: 0.2321 - accuracy: 0.9315 - val_loss: 0.6347 - val_accuracy: 0.8500
Epoch 87/250
181/181 [==============================] - 12s 67ms/step - loss: 0.2424 - accuracy: 0.9275 - val_loss: 0.6152 - val_accuracy: 0.8567
Epoch 88/250
181/181 [==============================] - 12s 68ms/step - loss: 0.2373 - accuracy: 0.9297 - val_loss: 0.6741 - val_accuracy: 0.8523
Epoch 89/250
181/181 [==============================] - 12s 68ms/step - loss: 0.2316 - accuracy: 0.9312 - val_loss: 0.5802 - val_accuracy: 0.8623
Epoch 90/250
181/181 [==============================] - 13s 69ms/step - loss: 0.2256 - accuracy: 0.9308 - val_loss: 0.7258 - val_accuracy: 0.8460
Epoch 91/250
181/181 [==============================] - 13s 73ms/step - loss: 0.2123 - accuracy: 0.9366 - val_loss: 0.5842 - val_accuracy: 0.8587
Epoch 92/250
181/181 [==============================] - 14s 79ms/step - loss: 0.2244 - accuracy: 0.9315 - val_loss: 0.8845 - val_accuracy: 0.8127
Epoch 93/250
181/181 [==============================] - 13s 70ms/step - loss: 0.2023 - accuracy: 0.9390 - val_loss: 0.4141 - val_accuracy: 0.9040
Epoch 94/250
181/181 [==============================] - 12s 66ms/step - loss: 0.2107 - accuracy: 0.9355 - val_loss: 0.4527 - val_accuracy: 0.8800
Epoch 95/250
181/181 [==============================] - 13s 69ms/step - loss: 0.2064 - accuracy: 0.9395 - val_loss: 0.5732 - val_accuracy: 0.8703
Epoch 96/250
181/181 [==============================] - 12s 67ms/step - loss: 0.2014 - accuracy: 0.9412 - val_loss: 0.5401 - val_accuracy: 0.8697
Epoch 97/250
181/181 [==============================] - 12s 68ms/step - loss: 0.1961 - accuracy: 0.9405 - val_loss: 0.5899 - val_accuracy: 0.8573
Epoch 98/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1887 - accuracy: 0.9429 - val_loss: 0.8183 - val_accuracy: 0.8240
Epoch 99/250
181/181 [==============================] - 12s 65ms/step - loss: 0.1905 - accuracy: 0.9415 - val_loss: 0.5417 - val_accuracy: 0.8737
Epoch 100/250
181/181 [==============================] - 12s 68ms/step - loss: 0.1815 - accuracy: 0.9439 - val_loss: 0.5382 - val_accuracy: 0.8660
Epoch 101/250
181/181 [==============================] - 12s 65ms/step - loss: 0.1868 - accuracy: 0.9440 - val_loss: 0.4288 - val_accuracy: 0.8913
Epoch 102/250
181/181 [==============================] - 12s 69ms/step - loss: 0.1865 - accuracy: 0.9462 - val_loss: 0.3672 - val_accuracy: 0.8950
Epoch 103/250
181/181 [==============================] - 12s 69ms/step - loss: 0.1749 - accuracy: 0.9472 - val_loss: 0.3908 - val_accuracy: 0.8920
Epoch 104/250
181/181 [==============================] - 12s 69ms/step - loss: 0.1734 - accuracy: 0.9476 - val_loss: 0.3395 - val_accuracy: 0.9050
Epoch 105/250
181/181 [==============================] - 13s 69ms/step - loss: 0.1635 - accuracy: 0.9513 - val_loss: 0.7188 - val_accuracy: 0.8570
Epoch 106/250
181/181 [==============================] - 12s 65ms/step - loss: 0.1737 - accuracy: 0.9485 - val_loss: 0.7378 - val_accuracy: 0.8403
Epoch 107/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1702 - accuracy: 0.9481 - val_loss: 0.6533 - val_accuracy: 0.8690
Epoch 108/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1579 - accuracy: 0.9543 - val_loss: 0.5021 - val_accuracy: 0.8920
Epoch 109/250
181/181 [==============================] - 12s 65ms/step - loss: 0.1532 - accuracy: 0.9544 - val_loss: 0.5839 - val_accuracy: 0.8767
Epoch 110/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1466 - accuracy: 0.9580 - val_loss: 0.8633 - val_accuracy: 0.8383
Epoch 111/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1554 - accuracy: 0.9545 - val_loss: 0.5398 - val_accuracy: 0.8720
Epoch 112/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1545 - accuracy: 0.9556 - val_loss: 0.4877 - val_accuracy: 0.8993
Epoch 113/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1592 - accuracy: 0.9532 - val_loss: 0.8030 - val_accuracy: 0.8447
Epoch 114/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1407 - accuracy: 0.9583 - val_loss: 0.5016 - val_accuracy: 0.8970
Epoch 115/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1413 - accuracy: 0.9599 - val_loss: 0.4632 - val_accuracy: 0.8987
Epoch 116/250
181/181 [==============================] - 12s 69ms/step - loss: 0.1403 - accuracy: 0.9591 - val_loss: 0.9914 - val_accuracy: 0.8227
Epoch 117/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1392 - accuracy: 0.9598 - val_loss: 0.6935 - val_accuracy: 0.8467
Epoch 118/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1331 - accuracy: 0.9622 - val_loss: 0.5855 - val_accuracy: 0.8683
Epoch 119/250
181/181 [==============================] - 12s 68ms/step - loss: 0.1317 - accuracy: 0.9600 - val_loss: 0.4733 - val_accuracy: 0.8873
Epoch 120/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1333 - accuracy: 0.9621 - val_loss: 0.7373 - val_accuracy: 0.8473
Epoch 121/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1245 - accuracy: 0.9632 - val_loss: 0.5314 - val_accuracy: 0.8867
Epoch 122/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1279 - accuracy: 0.9601 - val_loss: 0.4780 - val_accuracy: 0.8990
Epoch 123/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1278 - accuracy: 0.9637 - val_loss: 0.5959 - val_accuracy: 0.8657
Epoch 124/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1143 - accuracy: 0.9662 - val_loss: 0.5936 - val_accuracy: 0.8753
Epoch 125/250
181/181 [==============================] - 12s 69ms/step - loss: 0.1173 - accuracy: 0.9653 - val_loss: 0.3150 - val_accuracy: 0.9133
Epoch 126/250
181/181 [==============================] - 12s 68ms/step - loss: 0.1211 - accuracy: 0.9666 - val_loss: 0.9200 - val_accuracy: 0.8490
Epoch 127/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1093 - accuracy: 0.9656 - val_loss: 0.4130 - val_accuracy: 0.8997
Epoch 128/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1137 - accuracy: 0.9674 - val_loss: 0.4624 - val_accuracy: 0.8910
Epoch 129/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1077 - accuracy: 0.9671 - val_loss: 0.3373 - val_accuracy: 0.9187
Epoch 130/250
181/181 [==============================] - 13s 73ms/step - loss: 0.1144 - accuracy: 0.9662 - val_loss: 0.4053 - val_accuracy: 0.8970
Epoch 131/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1161 - accuracy: 0.9658 - val_loss: 0.3902 - val_accuracy: 0.9107
Epoch 132/250
181/181 [==============================] - 12s 66ms/step - loss: 0.1096 - accuracy: 0.9686 - val_loss: 0.4627 - val_accuracy: 0.8960
Epoch 133/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1051 - accuracy: 0.9689 - val_loss: 0.4770 - val_accuracy: 0.8940
Epoch 134/250
181/181 [==============================] - 12s 65ms/step - loss: 0.1059 - accuracy: 0.9689 - val_loss: 0.5569 - val_accuracy: 0.9023
Epoch 135/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1087 - accuracy: 0.9682 - val_loss: 0.3280 - val_accuracy: 0.9220
Epoch 136/250
181/181 [==============================] - 12s 67ms/step - loss: 0.1095 - accuracy: 0.9670 - val_loss: 0.5025 - val_accuracy: 0.9083
Epoch 137/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0961 - accuracy: 0.9721 - val_loss: 0.6748 - val_accuracy: 0.8813
Epoch 138/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0944 - accuracy: 0.9733 - val_loss: 0.3598 - val_accuracy: 0.9113
Epoch 139/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0931 - accuracy: 0.9729 - val_loss: 0.5766 - val_accuracy: 0.8920
Epoch 140/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0958 - accuracy: 0.9697 - val_loss: 1.0622 - val_accuracy: 0.8163
Epoch 141/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0925 - accuracy: 0.9720 - val_loss: 0.8291 - val_accuracy: 0.8687
Epoch 142/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0981 - accuracy: 0.9704 - val_loss: 0.5607 - val_accuracy: 0.8857
Epoch 143/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0947 - accuracy: 0.9704 - val_loss: 0.5048 - val_accuracy: 0.9047
Epoch 144/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0921 - accuracy: 0.9738 - val_loss: 0.6070 - val_accuracy: 0.8877
Epoch 145/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0857 - accuracy: 0.9748 - val_loss: 0.6137 - val_accuracy: 0.8703
Epoch 146/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0988 - accuracy: 0.9721 - val_loss: 0.4098 - val_accuracy: 0.9087
Epoch 147/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0842 - accuracy: 0.9746 - val_loss: 0.3080 - val_accuracy: 0.9300
Epoch 148/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0826 - accuracy: 0.9754 - val_loss: 0.3130 - val_accuracy: 0.9273
Epoch 149/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0836 - accuracy: 0.9738 - val_loss: 0.4542 - val_accuracy: 0.9090
Epoch 150/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0799 - accuracy: 0.9759 - val_loss: 0.6015 - val_accuracy: 0.9020
Epoch 151/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0808 - accuracy: 0.9767 - val_loss: 0.5588 - val_accuracy: 0.8960
Epoch 152/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0763 - accuracy: 0.9775 - val_loss: 0.3475 - val_accuracy: 0.9183
Epoch 153/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0765 - accuracy: 0.9782 - val_loss: 0.5935 - val_accuracy: 0.8983
Epoch 154/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0781 - accuracy: 0.9771 - val_loss: 0.4580 - val_accuracy: 0.9093
Epoch 155/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0774 - accuracy: 0.9762 - val_loss: 0.2890 - val_accuracy: 0.9273
Epoch 156/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0772 - accuracy: 0.9783 - val_loss: 0.3963 - val_accuracy: 0.9090
Epoch 157/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0775 - accuracy: 0.9773 - val_loss: 0.6118 - val_accuracy: 0.8857
Epoch 158/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0762 - accuracy: 0.9782 - val_loss: 0.3937 - val_accuracy: 0.9190
Epoch 159/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0675 - accuracy: 0.9796 - val_loss: 0.1960 - val_accuracy: 0.9500
Epoch 160/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0649 - accuracy: 0.9813 - val_loss: 0.3884 - val_accuracy: 0.9160
Epoch 161/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0708 - accuracy: 0.9803 - val_loss: 0.4091 - val_accuracy: 0.9147
Epoch 162/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0653 - accuracy: 0.9805 - val_loss: 0.4718 - val_accuracy: 0.9107
Epoch 163/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0663 - accuracy: 0.9792 - val_loss: 0.3987 - val_accuracy: 0.9137
Epoch 164/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0680 - accuracy: 0.9808 - val_loss: 0.6744 - val_accuracy: 0.8683
Epoch 165/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0662 - accuracy: 0.9797 - val_loss: 0.4525 - val_accuracy: 0.9153
Epoch 166/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0622 - accuracy: 0.9829 - val_loss: 0.4516 - val_accuracy: 0.8977
Epoch 167/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0661 - accuracy: 0.9808 - val_loss: 0.5259 - val_accuracy: 0.9100
Epoch 168/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0574 - accuracy: 0.9835 - val_loss: 0.5651 - val_accuracy: 0.8917
Epoch 169/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0654 - accuracy: 0.9804 - val_loss: 0.4271 - val_accuracy: 0.9093
Epoch 170/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0627 - accuracy: 0.9812 - val_loss: 0.9507 - val_accuracy: 0.8340
Epoch 171/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0673 - accuracy: 0.9804 - val_loss: 0.3173 - val_accuracy: 0.9350
Epoch 172/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0635 - accuracy: 0.9810 - val_loss: 0.4139 - val_accuracy: 0.9267
Epoch 173/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0563 - accuracy: 0.9826 - val_loss: 0.6258 - val_accuracy: 0.8747
Epoch 174/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0617 - accuracy: 0.9802 - val_loss: 0.5173 - val_accuracy: 0.8983
Epoch 175/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0565 - accuracy: 0.9843 - val_loss: 0.3797 - val_accuracy: 0.9237
Epoch 176/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0564 - accuracy: 0.9841 - val_loss: 0.3683 - val_accuracy: 0.9227
Epoch 177/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0555 - accuracy: 0.9838 - val_loss: 0.5919 - val_accuracy: 0.8933
Epoch 178/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0549 - accuracy: 0.9849 - val_loss: 0.5640 - val_accuracy: 0.8890
Epoch 179/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0527 - accuracy: 0.9843 - val_loss: 0.5736 - val_accuracy: 0.9013
Epoch 180/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0595 - accuracy: 0.9817 - val_loss: 0.5945 - val_accuracy: 0.8793
Epoch 181/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0536 - accuracy: 0.9841 - val_loss: 0.4467 - val_accuracy: 0.9137
Epoch 182/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0545 - accuracy: 0.9851 - val_loss: 0.9530 - val_accuracy: 0.8547
Epoch 183/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0516 - accuracy: 0.9849 - val_loss: 0.2889 - val_accuracy: 0.9360
Epoch 184/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0486 - accuracy: 0.9869 - val_loss: 0.6449 - val_accuracy: 0.8980
Epoch 185/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0502 - accuracy: 0.9853 - val_loss: 0.6621 - val_accuracy: 0.8857
Epoch 186/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0491 - accuracy: 0.9858 - val_loss: 0.3582 - val_accuracy: 0.9240
Epoch 187/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0472 - accuracy: 0.9858 - val_loss: 0.6302 - val_accuracy: 0.8883
Epoch 188/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0475 - accuracy: 0.9845 - val_loss: 0.5436 - val_accuracy: 0.9057
Epoch 189/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0504 - accuracy: 0.9851 - val_loss: 0.5596 - val_accuracy: 0.9110
Epoch 190/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0472 - accuracy: 0.9860 - val_loss: 0.4493 - val_accuracy: 0.9160
Epoch 191/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0514 - accuracy: 0.9858 - val_loss: 0.4998 - val_accuracy: 0.9087
Epoch 192/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0462 - accuracy: 0.9875 - val_loss: 0.4118 - val_accuracy: 0.9143
Epoch 193/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0475 - accuracy: 0.9858 - val_loss: 0.2825 - val_accuracy: 0.9433
Epoch 194/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0431 - accuracy: 0.9882 - val_loss: 0.2870 - val_accuracy: 0.9413
Epoch 195/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0410 - accuracy: 0.9882 - val_loss: 0.3675 - val_accuracy: 0.9263
Epoch 196/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0411 - accuracy: 0.9882 - val_loss: 0.3929 - val_accuracy: 0.9147
Epoch 197/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0445 - accuracy: 0.9878 - val_loss: 0.4360 - val_accuracy: 0.9133
Epoch 198/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0477 - accuracy: 0.9872 - val_loss: 0.1995 - val_accuracy: 0.9537
Epoch 199/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0447 - accuracy: 0.9866 - val_loss: 0.6345 - val_accuracy: 0.9003
Epoch 200/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0400 - accuracy: 0.9890 - val_loss: 0.3986 - val_accuracy: 0.9200
Epoch 201/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0401 - accuracy: 0.9893 - val_loss: 0.3807 - val_accuracy: 0.9237
Epoch 202/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0461 - accuracy: 0.9863 - val_loss: 0.4131 - val_accuracy: 0.9333
Epoch 203/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0387 - accuracy: 0.9903 - val_loss: 0.3569 - val_accuracy: 0.9273
Epoch 204/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0355 - accuracy: 0.9900 - val_loss: 0.4418 - val_accuracy: 0.9243
Epoch 205/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0391 - accuracy: 0.9891 - val_loss: 0.6313 - val_accuracy: 0.8850
Epoch 206/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0417 - accuracy: 0.9888 - val_loss: 0.5215 - val_accuracy: 0.9023
Epoch 207/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0396 - accuracy: 0.9891 - val_loss: 0.3498 - val_accuracy: 0.9407
Epoch 208/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0378 - accuracy: 0.9893 - val_loss: 0.4351 - val_accuracy: 0.9177
Epoch 209/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0351 - accuracy: 0.9897 - val_loss: 0.3507 - val_accuracy: 0.9273
Epoch 210/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0363 - accuracy: 0.9890 - val_loss: 0.2423 - val_accuracy: 0.9477
Epoch 211/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0377 - accuracy: 0.9893 - val_loss: 0.3686 - val_accuracy: 0.9250
Epoch 212/250
181/181 [==============================] - 12s 67ms/step - loss: 0.0437 - accuracy: 0.9884 - val_loss: 0.2230 - val_accuracy: 0.9463
Epoch 213/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0345 - accuracy: 0.9907 - val_loss: 0.3226 - val_accuracy: 0.9340
Epoch 214/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0369 - accuracy: 0.9903 - val_loss: 0.3936 - val_accuracy: 0.9193
Epoch 215/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0330 - accuracy: 0.9900 - val_loss: 0.3864 - val_accuracy: 0.9180
Epoch 216/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0361 - accuracy: 0.9903 - val_loss: 0.2921 - val_accuracy: 0.9437
Epoch 217/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0387 - accuracy: 0.9886 - val_loss: 0.2048 - val_accuracy: 0.9563
Epoch 218/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0306 - accuracy: 0.9920 - val_loss: 0.2457 - val_accuracy: 0.9450
Epoch 219/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0330 - accuracy: 0.9905 - val_loss: 0.5573 - val_accuracy: 0.9183
Epoch 220/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0343 - accuracy: 0.9904 - val_loss: 0.3294 - val_accuracy: 0.9343
Epoch 221/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0357 - accuracy: 0.9896 - val_loss: 0.3053 - val_accuracy: 0.9363
Epoch 222/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0383 - accuracy: 0.9883 - val_loss: 0.3552 - val_accuracy: 0.9267
Epoch 223/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0338 - accuracy: 0.9900 - val_loss: 0.2649 - val_accuracy: 0.9467
Epoch 224/250
181/181 [==============================] - 12s 69ms/step - loss: 0.0306 - accuracy: 0.9915 - val_loss: 0.2670 - val_accuracy: 0.9433
Epoch 225/250
181/181 [==============================] - 12s 68ms/step - loss: 0.0325 - accuracy: 0.9907 - val_loss: 0.3218 - val_accuracy: 0.9383
Epoch 226/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0293 - accuracy: 0.9916 - val_loss: 0.3286 - val_accuracy: 0.9367
Epoch 227/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0290 - accuracy: 0.9918 - val_loss: 0.4757 - val_accuracy: 0.9103
Epoch 228/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0304 - accuracy: 0.9914 - val_loss: 0.4357 - val_accuracy: 0.9280
Epoch 229/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0327 - accuracy: 0.9907 - val_loss: 0.3418 - val_accuracy: 0.9307
Epoch 230/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0301 - accuracy: 0.9922 - val_loss: 0.3323 - val_accuracy: 0.9313
Epoch 231/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0305 - accuracy: 0.9926 - val_loss: 0.3453 - val_accuracy: 0.9373
Epoch 232/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0325 - accuracy: 0.9893 - val_loss: 0.4085 - val_accuracy: 0.9220
Epoch 233/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0259 - accuracy: 0.9929 - val_loss: 0.5459 - val_accuracy: 0.9067
Epoch 234/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0274 - accuracy: 0.9916 - val_loss: 0.2580 - val_accuracy: 0.9480
Epoch 235/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0328 - accuracy: 0.9905 - val_loss: 0.6352 - val_accuracy: 0.9047
Epoch 236/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0330 - accuracy: 0.9904 - val_loss: 0.5738 - val_accuracy: 0.9020
Epoch 237/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0320 - accuracy: 0.9924 - val_loss: 0.5137 - val_accuracy: 0.9127
Epoch 238/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0300 - accuracy: 0.9922 - val_loss: 0.2877 - val_accuracy: 0.9340
Epoch 239/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0259 - accuracy: 0.9924 - val_loss: 0.5503 - val_accuracy: 0.9070
Epoch 240/250
181/181 [==============================] - 12s 64ms/step - loss: 0.0284 - accuracy: 0.9927 - val_loss: 0.3370 - val_accuracy: 0.9377
Epoch 241/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0330 - accuracy: 0.9906 - val_loss: 0.2369 - val_accuracy: 0.9467
Epoch 242/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0288 - accuracy: 0.9922 - val_loss: 0.2781 - val_accuracy: 0.9493
Epoch 243/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0260 - accuracy: 0.9932 - val_loss: 0.2055 - val_accuracy: 0.9537
Epoch 244/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0295 - accuracy: 0.9913 - val_loss: 0.5195 - val_accuracy: 0.9053
Epoch 245/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0246 - accuracy: 0.9937 - val_loss: 0.5165 - val_accuracy: 0.9147
Epoch 246/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0296 - accuracy: 0.9919 - val_loss: 0.4330 - val_accuracy: 0.9197
Epoch 247/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0256 - accuracy: 0.9926 - val_loss: 0.3955 - val_accuracy: 0.9407
Epoch 248/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0248 - accuracy: 0.9921 - val_loss: 0.2563 - val_accuracy: 0.9517
Epoch 249/250
181/181 [==============================] - 12s 66ms/step - loss: 0.0241 - accuracy: 0.9940 - val_loss: 0.3678 - val_accuracy: 0.9443
Epoch 250/250
181/181 [==============================] - 12s 65ms/step - loss: 0.0296 - accuracy: 0.9920 - val_loss: 0.6460 - val_accuracy: 0.8957
In [38]:
# Plot training vs. validation loss over all epochs for the 131x131 model.
fig, ax = plt.subplots()
ax.plot(history131.history["loss"], label="Train")
ax.plot(history131.history["val_loss"], label="Validation")
ax.set_title("Model loss")
ax.set_ylabel("Loss")
ax.set_xlabel("Epoch")
ax.legend(loc="upper right")
plt.show()
No description has been provided for this image

From the model loss graph, the training loss decreases steadily while the validation loss stays noticeably higher and fluctuates, which suggests some overfitting of the model to the training data.

In [39]:
# Plot training vs. validation accuracy over all epochs for the 131x131 model.
plt.figure()
plt.plot(history131.history["accuracy"])
plt.plot(history131.history["val_accuracy"])
plt.title('Model Accuracy')
plt.ylabel('Accuracy')  # fixed: was mislabelled 'Loss' on an accuracy plot
plt.xlabel('Epoch')
plt.legend(['Train', 'Validation'], loc='upper right')
plt.show()
No description has been provided for this image
In [40]:
# Persist the trained 131x131 model (architecture + optimizer state + weights) in HDF5 format.
model131.save('model131.h5')
# Also save the weights on their own so they can be loaded into a freshly built model.
model131.save_weights('model131_weights.h5')
In [41]:
# Reload the saved 131x131 model, evaluate it on the test set, and visualise
# the confusion matrix plus the model architecture.
from tensorflow.keras.models import load_model
# keras.utils.vis_utils is deprecated; plot_model lives in tensorflow.keras.utils
from tensorflow.keras.utils import plot_model
from sklearn.metrics import confusion_matrix
import seaborn as sns
import pandas as pd

# load_model() restores architecture AND weights, so a separate
# load_weights('model131_weights.h5') call afterwards is redundant.
model131 = load_model('model131.h5')

model131.summary()

# Predicted class probabilities for the test images.
y_pred = model131.predict(X_test131)

# Compute the confusion matrix once (true class index vs. predicted class index);
# the original cell computed it twice and discarded the first result.
cm = confusion_matrix(np.argmax(y_test131, axis=1), np.argmax(y_pred, axis=1))

pd.options.display.float_format = '{:.2f}'.format

# Label rows/columns with the dataset's class names for a readable heatmap.
df_cm = pd.DataFrame(cm, index=list(test_dataset_131.class_names),
                     columns=list(test_dataset_131.class_names))

loss, accuracy = model131.evaluate(X_test131, y_test131)

# Heatmap of the confusion matrix, annotated with integer counts.
plt.figure(figsize=(10, 7))
sns.heatmap(df_cm, annot=True, fmt='d')
plt.xlabel('Predicted')
plt.ylabel('Actual')
plt.title(f'Image size 131\nLoss: {loss:.3f}, Accuracy: {accuracy:.3f}')
plt.show()

plot_model(model131, show_shapes=True, show_layer_names=True, show_layer_activations=True, expand_nested=True)
Model: "sequential_3"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 conv2d_9 (Conv2D)           (None, 129, 129, 32)      320       
                                                                 
 batch_normalization_9 (Batc  (None, 129, 129, 32)     128       
 hNormalization)                                                 
                                                                 
 max_pooling2d_6 (MaxPooling  (None, 64, 64, 32)       0         
 2D)                                                             
                                                                 
 conv2d_10 (Conv2D)          (None, 62, 62, 64)        18496     
                                                                 
 batch_normalization_10 (Bat  (None, 62, 62, 64)       256       
 chNormalization)                                                
                                                                 
 max_pooling2d_7 (MaxPooling  (None, 31, 31, 64)       0         
 2D)                                                             
                                                                 
 conv2d_11 (Conv2D)          (None, 29, 29, 128)       73856     
                                                                 
 batch_normalization_11 (Bat  (None, 29, 29, 128)      512       
 chNormalization)                                                
                                                                 
 dropout_12 (Dropout)        (None, 29, 29, 128)       0         
                                                                 
 max_pooling2d_8 (MaxPooling  (None, 14, 14, 128)      0         
 2D)                                                             
                                                                 
 conv2d_12 (Conv2D)          (None, 12, 12, 256)       295168    
                                                                 
 batch_normalization_12 (Bat  (None, 12, 12, 256)      1024      
 chNormalization)                                                
                                                                 
 dropout_13 (Dropout)        (None, 12, 12, 256)       0         
                                                                 
 max_pooling2d_9 (MaxPooling  (None, 6, 6, 256)        0         
 2D)                                                             
                                                                 
 global_average_pooling2d (G  (None, 256)              0         
 lobalAveragePooling2D)                                          
                                                                 
 dense_9 (Dense)             (None, 100)               25700     
                                                                 
 dropout_14 (Dropout)        (None, 100)               0         
                                                                 
 dense_10 (Dense)            (None, 50)                5050      
                                                                 
 dropout_15 (Dropout)        (None, 50)                0         
                                                                 
 dense_11 (Dense)            (None, 15)                765       
                                                                 
=================================================================
Total params: 421,275
Trainable params: 420,315
Non-trainable params: 960
_________________________________________________________________
94/94 [==============================] - 1s 13ms/step
94/94 [==============================] - 1s 10ms/step - loss: 0.6870 - accuracy: 0.8907
No description has been provided for this image
Out[41]:
No description has been provided for this image

The 131x131 Model has a loss of 0.687 and an accuracy of 89.1% when tested on the testing data.

From the confusion matrix, you can see that the model misclassifies many images as Carrot when they are not, with Papaya being the class most often confused with it.